showPrivileges(HivePrincipal principal,
+ HivePrivilegeObject privObj) throws HiveAuthorizationPluginException {
+ return accessController.showPrivileges(principal, privObj);
+ }
+
+ @Override
+ public VERSION getVersion() {
+ return VERSION.V1;
+ }
+
+
+ // other access control functions
+
+// void validateAuthority(HiveAction, inputs, outputs){
+// authValidator.validateAuthority(HiveAction, inputs, outputs);
+// }
+}
Index: common/src/java/org/apache/hive/security/HivePrincipal.java
===================================================================
--- common/src/java/org/apache/hive/security/HivePrincipal.java (revision 0)
+++ common/src/java/org/apache/hive/security/HivePrincipal.java (revision 0)
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.security;
+
+/**
+ * Represents the user or role in grant/revoke statements
+ */
+public class HivePrincipal {
+
+ public enum HivePrincipalType{
+ USER, ROLE, UNKNOWN
+ }
+
+ private final String name;
+ private final HivePrincipalType type;
+
+ public HivePrincipal(String name, HivePrincipalType type){
+ this.name = name;
+ this.type = type;
+ }
+ public String getName() {
+ return name;
+ }
+ public HivePrincipalType getType() {
+ return type;
+ }
+
+}
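
For illustration (not part of the patch), a minimal sketch of how a grant statement's grantee would be represented with the class above; the statement text is hypothetical:

    // The grantee in "GRANT SELECT ON t TO USER hive_user":
    HivePrincipal grantee =
        new HivePrincipal("hive_user", HivePrincipal.HivePrincipalType.USER);
    System.out.println(grantee.getName() + "/" + grantee.getType()); // hive_user/USER
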
Index: common/src/java/org/apache/hive/security/HiveOperationType.java
===================================================================
--- common/src/java/org/apache/hive/security/HiveOperationType.java (revision 0)
+++ common/src/java/org/apache/hive/security/HiveOperationType.java (revision 0)
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.security;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
+
+/**
+ * List of hive operations types.
+ */
+@Public
+public enum HiveOperationType {
+ EXPLAIN,
+ LOAD,
+ EXPORT,
+ IMPORT,
+ CREATEDATABASE,
+ DROPDATABASE,
+ SWITCHDATABASE,
+ LOCKDB,
+ UNLOCKDB,
+ DROPTABLE,
+ DESCTABLE,
+ DESCFUNCTION,
+ MSCK,
+ ALTERTABLE_ADDCOLS,
+ ALTERTABLE_REPLACECOLS,
+ ALTERTABLE_RENAMECOL,
+ ALTERTABLE_RENAMEPART,
+ ALTERTABLE_RENAME,
+ ALTERTABLE_DROPPARTS,
+ ALTERTABLE_ADDPARTS,
+ ALTERTABLE_TOUCH,
+ ALTERTABLE_ARCHIVE,
+ ALTERTABLE_UNARCHIVE,
+ ALTERTABLE_PROPERTIES,
+ ALTERTABLE_SERIALIZER,
+ ALTERPARTITION_SERIALIZER,
+ ALTERTABLE_SERDEPROPERTIES,
+ ALTERPARTITION_SERDEPROPERTIES,
+ ALTERTABLE_CLUSTER_SORT,
+ ANALYZE_TABLE,
+ ALTERTABLE_BUCKETNUM,
+ ALTERPARTITION_BUCKETNUM,
+ SHOWDATABASES,
+ SHOWTABLES,
+ SHOWCOLUMNS,
+ SHOW_TABLESTATUS,
+ SHOW_TBLPROPERTIES,
+ SHOW_CREATETABLE,
+ SHOWFUNCTIONS,
+ SHOWINDEXES,
+ SHOWPARTITIONS,
+ SHOWLOCKS,
+ CREATEFUNCTION,
+ DROPFUNCTION,
+ CREATEMACRO,
+ DROPMACRO,
+ CREATEVIEW,
+ DROPVIEW,
+ CREATEINDEX,
+ DROPINDEX,
+ ALTERINDEX_REBUILD,
+ ALTERVIEW_PROPERTIES,
+ DROPVIEW_PROPERTIES,
+ LOCKTABLE,
+ UNLOCKTABLE,
+ CREATEROLE,
+ DROPROLE,
+ GRANT_PRIVILEGE,
+ REVOKE_PRIVILEGE,
+ SHOW_GRANT,
+ GRANT_ROLE,
+ REVOKE_ROLE,
+ SHOW_ROLES,
+ SHOW_ROLE_GRANT,
+ ALTERTABLE_PROTECTMODE,
+ ALTERPARTITION_PROTECTMODE,
+ ALTERTABLE_FILEFORMAT,
+ ALTERPARTITION_FILEFORMAT,
+ ALTERTABLE_LOCATION,
+ ALTERPARTITION_LOCATION,
+ CREATETABLE,
+ TRUNCATETABLE,
+ CREATETABLE_AS_SELECT,
+ QUERY,
+ ALTERINDEX_PROPS,
+ ALTERDATABASE,
+ DESCDATABASE,
+ ALTERTABLE_MERGEFILES,
+ ALTERPARTITION_MERGEFILES,
+ ALTERTABLE_SKEWED,
+ ALTERTBLPART_SKEWED_LOCATION,
+ ALTERVIEW_RENAME,
+
+}
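
Authorization plugins would typically branch on these values; a hedged sketch of one such check (the helper and its DDL classification are illustrative, not defined by this patch):

    // Hypothetical helper: classify a handful of operations as DDL.
    static boolean isDdl(HiveOperationType op) {
      switch (op) {
        case CREATEDATABASE:
        case DROPDATABASE:
        case CREATETABLE:
        case DROPTABLE:
          return true;
        default:
          return false;
      }
    }
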
Index: common/src/java/org/apache/hive/security/HivePrivilegeInfo.java
===================================================================
--- common/src/java/org/apache/hive/security/HivePrivilegeInfo.java (revision 0)
+++ common/src/java/org/apache/hive/security/HivePrivilegeInfo.java (revision 0)
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.security;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
+import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+
+/**
+ * Represents a privilege granted for an object to a principal
+ */
+@Public
+@Evolving
+
+public class HivePrivilegeInfo{
+ private final HivePrincipal principal;
+ private final HivePrivilege privilege;
+ private final HivePrivilegeObject object;
+ private final HivePrincipal grantorPrincipal;
+ private final boolean grantOption;
+
+ public HivePrivilegeInfo(HivePrincipal principal, HivePrivilege privilege,
+ HivePrivilegeObject object, HivePrincipal grantorPrincipal, boolean grantOption){
+ this.principal = principal;
+ this.privilege = privilege;
+ this.object = object;
+ this.grantorPrincipal = grantorPrincipal;
+ this.grantOption = grantOption;
+ }
+
+ public HivePrincipal getPrincipal() {
+ return principal;
+ }
+
+ public HivePrivilege getPrivilege() {
+ return privilege;
+ }
+
+ public HivePrivilegeObject getObject() {
+ return object;
+ }
+
+ public HivePrincipal getGrantorPrincipal() {
+ return grantorPrincipal;
+ }
+
+ public boolean isGrantOption() {
+ return grantOption;
+ }
+
+
+}
\ No newline at end of file
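
A sketch of assembling the value object; selectPriv and tableObj stand in for HivePrivilege and HivePrivilegeObject instances, whose construction is assumed here and not shown in this part of the patch:

    HivePrincipal user =
        new HivePrincipal("user1", HivePrincipal.HivePrincipalType.USER);
    HivePrincipal grantor =
        new HivePrincipal("admin_role", HivePrincipal.HivePrincipalType.ROLE);
    // selectPriv (HivePrivilege) and tableObj (HivePrivilegeObject) are assumed locals:
    HivePrivilegeInfo info =
        new HivePrivilegeInfo(user, selectPriv, tableObj, grantor, false);
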
Index: common/src/java/org/apache/hive/common/util/HiveStringUtils.java
===================================================================
--- common/src/java/org/apache/hive/common/util/HiveStringUtils.java (revision 1560846)
+++ common/src/java/org/apache/hive/common/util/HiveStringUtils.java (working copy)
@@ -55,7 +55,7 @@
* Priority of the StringUtils shutdown hook.
*/
public static final int SHUTDOWN_HOOK_PRIORITY = 0;
-
+ public static final String LINE_SEP = System.getProperty("line.separator");
private static final DecimalFormat decimalFormat;
static {
NumberFormat numberFormat = NumberFormat.getNumberInstance(Locale.ENGLISH);
@@ -826,4 +826,5 @@
}
return len;
}
+
}
Index: common/src/java/org/apache/hive/parse/ASTNodeOrigin.java
===================================================================
--- common/src/java/org/apache/hive/parse/ASTNodeOrigin.java (revision 0)
+++ common/src/java/org/apache/hive/parse/ASTNodeOrigin.java (revision 0)
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.parse;
+
+
+/**
+ * ASTNodeOrigin contains contextual information about the object from whose
+ * definition a particular ASTNode originated. For example, suppose a view v is
+ * defined as select x+1 as y from t, and we're processing a query
+ * select v1.y from v as v1, and there's a type-checking problem
+ * with the expression x+1 due to an ALTER TABLE on t subsequent to
+ * the creation of v. Then, when reporting the error, we want to provide the
+ * parser location with respect to the definition of v (rather than with respect
+ * to the top-level query, since that represents a completely different
+ * "parser coordinate system").
+ *
+ *
+ *
+ * So, when expanding the definition of v while analyzing the top-level query,
+ * we tag each ASTNode with a reference to an ASTNodeOrigin describing v and its
+ * usage within the query.
+ */
+public class ASTNodeOrigin {
+ private final String objectType;
+ private final String objectName;
+ private final String objectDefinition;
+ private final String usageAlias;
+ private final ASTNode usageNode;
+
+ public ASTNodeOrigin(String objectType, String objectName,
+ String objectDefinition, String usageAlias, ASTNode usageNode) {
+ this.objectType = objectType;
+ this.objectName = objectName;
+ this.objectDefinition = objectDefinition;
+ this.usageAlias = usageAlias;
+ this.usageNode = usageNode;
+ }
+
+ /**
+ * @return the type of the object from which an ASTNode originated, e.g.
+ * "view".
+ */
+ public String getObjectType() {
+ return objectType;
+ }
+
+ /**
+ * @return the name of the object from which an ASTNode originated, e.g. "v".
+ */
+ public String getObjectName() {
+ return objectName;
+ }
+
+ /**
+ * @return the definition of the object from which an ASTNode originated, e.g.
+ * select x+1 as y from t.
+ */
+ public String getObjectDefinition() {
+ return objectDefinition;
+ }
+
+ /**
+ * @return the alias of the object from which an ASTNode originated, e.g. "v1"
+ * (this can help with debugging context-dependent expansions)
+ */
+ public String getUsageAlias() {
+ return usageAlias;
+ }
+
+ /**
+ * @return the expression node triggering usage of an object from which an
+ * ASTNode originated, e.g. v as v1 (this can help with
+ * debugging context-dependent expansions)
+ */
+ public ASTNode getUsageNode() {
+ return usageNode;
+ }
+}
+
+// End ASTNodeOrigin.java
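
To make the view-expansion scenario in the Javadoc concrete, a sketch (usageNode and expandedNode are assumed ASTNode locals):

    // While expanding view v ("select x+1 as y from t") inside
    // "select v1.y from v as v1", tag each expanded node with its origin:
    ASTNodeOrigin origin = new ASTNodeOrigin(
        "VIEW", "v", "select x+1 as y from t", "v1", usageNode);
    expandedNode.setOrigin(origin);
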
Index: common/src/java/org/apache/hive/parse/Node.java
===================================================================
--- common/src/java/org/apache/hive/parse/Node.java (revision 0)
+++ common/src/java/org/apache/hive/parse/Node.java (revision 0)
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.parse;
+
+import java.util.List;
+
+/**
+ * This interface defines the functions needed by the walkers and dispatchers.
+ * These are implemented by the node of the graph that needs to be walked.
+ */
+public interface Node {
+
+ /**
+ * Gets the vector of children nodes. This is used in the graph walker
+ * algorithms.
+ *
+ * @return List<? extends Node>
+ */
+ List<? extends Node> getChildren();
+
+ /**
+ * Gets the name of the node. This is used in the rule dispatchers.
+ *
+ * @return String
+ */
+ String getName();
+}
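
A sketch of the kind of graph walker this interface enables (a fragment; assumes java.util.List is imported). Note that implementations such as ASTNode below return null from getChildren() for leaves, which the null check covers:

    // Pre-order walk over any Node tree.
    static void preOrder(Node node) {
      System.out.println(node.getName());
      List<? extends Node> children = node.getChildren();
      if (children != null) {
        for (Node child : children) {
          preOrder(child);
        }
      }
    }
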
Index: common/src/java/org/apache/hive/parse/ASTNode.java
===================================================================
--- common/src/java/org/apache/hive/parse/ASTNode.java (revision 0)
+++ common/src/java/org/apache/hive/parse/ASTNode.java (revision 0)
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.parse;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+
+import org.antlr.runtime.Token;
+import org.antlr.runtime.tree.CommonTree;
+import org.antlr.runtime.tree.Tree;
+import org.apache.hive.parse.ASTNodeOrigin;
+
+/**
+ * ASTNode: an ANTLR {@link CommonTree} node that implements {@link Node} and carries
+ * {@link ASTNodeOrigin} information for nodes expanded from object references.
+ */
+public class ASTNode extends CommonTree implements Node, Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private transient ASTNodeOrigin origin;
+
+ public ASTNode() {
+ }
+
+ /**
+ * Constructor.
+ *
+ * @param t
+ * Token for the CommonTree Node
+ */
+ public ASTNode(Token t) {
+ super(t);
+ }
+
+ public ASTNode(ASTNode node) {
+ super(node);
+ this.origin = node.origin;
+ }
+
+ @Override
+ public Tree dupNode() {
+ return new ASTNode(this);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.apache.hadoop.hive.ql.lib.Node#getChildren()
+ */
+ @Override
+ public ArrayList<Node> getChildren() {
+ if (super.getChildCount() == 0) {
+ return null;
+ }
+
+ ArrayList<Node> ret_vec = new ArrayList<Node>();
+ for (int i = 0; i < super.getChildCount(); ++i) {
+ ret_vec.add((Node) super.getChild(i));
+ }
+
+ return ret_vec;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.apache.hadoop.hive.ql.lib.Node#getName()
+ */
+ public String getName() {
+ return (Integer.valueOf(super.getToken().getType())).toString();
+ }
+
+ /**
+ * @return information about the object from which this ASTNode originated, or
+ * null if this ASTNode was not expanded from an object reference
+ */
+ public ASTNodeOrigin getOrigin() {
+ return origin;
+ }
+
+ /**
+ * Tag this ASTNode with information about the object from which this node
+ * originated.
+ */
+ public void setOrigin(ASTNodeOrigin origin) {
+ this.origin = origin;
+ }
+
+ public String dump() {
+ StringBuilder sb = new StringBuilder();
+
+ sb.append('(');
+ sb.append(toString());
+ ArrayList<Node> children = getChildren();
+ if (children != null) {
+ for (Node node : getChildren()) {
+ if (node instanceof ASTNode) {
+ sb.append(((ASTNode) node).dump());
+ } else {
+ sb.append("NON-ASTNODE!!");
+ }
+ }
+ }
+ sb.append(')');
+ return sb.toString();
+ }
+
+}
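
A sketch of dump()'s nested-parenthesis rendering (CommonToken is the ANTLR 3 runtime token class; the token types and texts are arbitrary):

    ASTNode root = new ASTNode(new org.antlr.runtime.CommonToken(1, "TOK_ROOT"));
    root.addChild(new ASTNode(new org.antlr.runtime.CommonToken(2, "leaf")));
    System.out.println(root.dump()); // prints: (TOK_ROOT(leaf))
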
Index: common/src/java/org/apache/hive/exception/ErrorMsg.java
===================================================================
--- common/src/java/org/apache/hive/exception/ErrorMsg.java (revision 0)
+++ common/src/java/org/apache/hive/exception/ErrorMsg.java (revision 0)
@@ -0,0 +1,709 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.exception;
+
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hive.common.util.HiveStringUtils;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.parse.ASTNodeOrigin;
+
+/**
+ * List of all error messages.
+ * This list contains both compile time and run-time errors.
+ *
+ * This class supports parametrized messages such as {@link #TRUNCATE_FOR_NON_MANAGED_TABLE}. These are
+ * preferable over un-parametrized ones where an arbitrary String is appended to the end of the message,
+ * for example {@link #getMsg(String)} and {@link #INVALID_TABLE}.
+ */
+
+public enum ErrorMsg {
+ // The error codes are Hive-specific and partitioned into the following ranges:
+ // 10000 to 19999: Errors occurring during semantic analysis and compilation of the query.
+ // 20000 to 29999: Runtime errors where Hive believes that retries are unlikely to succeed.
+ // 30000 to 39999: Runtime errors which Hive thinks may be transient and retrying may succeed.
+ // 40000 to 49999: Errors where Hive is unable to advise about retries.
+ // In addition to the error code, ErrorMsg also has a SQLState field.
+ // SQLStates are taken from Section 22.1 of ISO-9075.
+ // See http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt
+ // Most will just rollup to the generic syntax error state of 42000, but
+ // specific errors can override that state.
+ // See this page for how MySQL uses SQLState codes:
+ // http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-error-sqlstates.html
+ GENERIC_ERROR(40000, "Exception while processing"),
+
+ //========================== 10000 range starts here ========================//
+ INVALID_TABLE(10001, "Table not found", "42S02"),
+ INVALID_COLUMN(10002, "Invalid column reference"),
+ INVALID_INDEX(10003, "Invalid index"),
+ INVALID_TABLE_OR_COLUMN(10004, "Invalid table alias or column reference"),
+ AMBIGUOUS_TABLE_OR_COLUMN(10005, "Ambiguous table alias or column reference"),
+ INVALID_PARTITION(10006, "Partition not found"),
+ AMBIGUOUS_COLUMN(10007, "Ambiguous column reference"),
+ AMBIGUOUS_TABLE_ALIAS(10008, "Ambiguous table alias"),
+ INVALID_TABLE_ALIAS(10009, "Invalid table alias"),
+ NO_TABLE_ALIAS(10010, "No table alias"),
+ INVALID_FUNCTION(10011, "Invalid function"),
+ INVALID_FUNCTION_SIGNATURE(10012, "Function argument type mismatch"),
+ INVALID_OPERATOR_SIGNATURE(10013, "Operator argument type mismatch"),
+ INVALID_ARGUMENT(10014, "Wrong arguments"),
+ INVALID_ARGUMENT_LENGTH(10015, "Arguments length mismatch", "21000"),
+ INVALID_ARGUMENT_TYPE(10016, "Argument type mismatch"),
+ INVALID_JOIN_CONDITION_1(10017, "Both left and right aliases encountered in JOIN"),
+ INVALID_JOIN_CONDITION_2(10018, "Neither left nor right aliases encountered in JOIN"),
+ INVALID_JOIN_CONDITION_3(10019, "OR not supported in JOIN currently"),
+ INVALID_TRANSFORM(10020, "TRANSFORM with other SELECT columns not supported"),
+ DUPLICATE_GROUPBY_KEY(10021, "Repeated key in GROUP BY"),
+ UNSUPPORTED_MULTIPLE_DISTINCTS(10022, "DISTINCT on different columns not supported" +
+ " with skew in data"),
+ NO_SUBQUERY_ALIAS(10023, "No alias for subquery"),
+ NO_INSERT_INSUBQUERY(10024, "Cannot insert in a subquery. Inserting to table "),
+ NON_KEY_EXPR_IN_GROUPBY(10025, "Expression not in GROUP BY key"),
+ INVALID_XPATH(10026, "General . and [] operators are not supported"),
+ INVALID_PATH(10027, "Invalid path"),
+ ILLEGAL_PATH(10028, "Path is not legal"),
+ INVALID_NUMERICAL_CONSTANT(10029, "Invalid numerical constant"),
+ INVALID_ARRAYINDEX_CONSTANT(10030, "Non-constant expressions for array indexes not supported"),
+ INVALID_MAPINDEX_CONSTANT(10031, "Non-constant expression for map indexes not supported"),
+ INVALID_MAPINDEX_TYPE(10032, "MAP key type does not match index expression type"),
+ NON_COLLECTION_TYPE(10033, "[] not valid on non-collection types"),
+ SELECT_DISTINCT_WITH_GROUPBY(10034, "SELECT DISTINCT and GROUP BY can not be in the same query"),
+ COLUMN_REPEATED_IN_PARTITIONING_COLS(10035, "Column repeated in partitioning columns"),
+ DUPLICATE_COLUMN_NAMES(10036, "Duplicate column name:"),
+ INVALID_BUCKET_NUMBER(10037, "Bucket number should be bigger than zero"),
+ COLUMN_REPEATED_IN_CLUSTER_SORT(10038, "Same column cannot appear in CLUSTER BY and SORT BY"),
+ SAMPLE_RESTRICTION(10039, "Cannot SAMPLE on more than two columns"),
+ SAMPLE_COLUMN_NOT_FOUND(10040, "SAMPLE column not found"),
+ NO_PARTITION_PREDICATE(10041, "No partition predicate found"),
+ INVALID_DOT(10042, ". Operator is only supported on struct or list of struct types"),
+ INVALID_TBL_DDL_SERDE(10043, "Either list of columns or a custom serializer should be specified"),
+ TARGET_TABLE_COLUMN_MISMATCH(10044,
+ "Cannot insert into target table because column number/types are different"),
+ TABLE_ALIAS_NOT_ALLOWED(10045, "Table alias not allowed in sampling clause"),
+ CLUSTERBY_DISTRIBUTEBY_CONFLICT(10046, "Cannot have both CLUSTER BY and DISTRIBUTE BY clauses"),
+ ORDERBY_DISTRIBUTEBY_CONFLICT(10047, "Cannot have both ORDER BY and DISTRIBUTE BY clauses"),
+ CLUSTERBY_SORTBY_CONFLICT(10048, "Cannot have both CLUSTER BY and SORT BY clauses"),
+ ORDERBY_SORTBY_CONFLICT(10049, "Cannot have both ORDER BY and SORT BY clauses"),
+ CLUSTERBY_ORDERBY_CONFLICT(10050, "Cannot have both CLUSTER BY and ORDER BY clauses"),
+ NO_LIMIT_WITH_ORDERBY(10051, "In strict mode, if ORDER BY is specified, "
+ + "LIMIT must also be specified"),
+ NO_CARTESIAN_PRODUCT(10052, "In strict mode, cartesian product is not allowed. "
+ + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"),
+ UNION_NOTIN_SUBQ(10053, "Top level UNION is not supported currently; "
+ + "use a subquery for the UNION"),
+ INVALID_INPUT_FORMAT_TYPE(10054, "Input format must implement InputFormat"),
+ INVALID_OUTPUT_FORMAT_TYPE(10055, "Output Format must implement HiveOutputFormat, "
+ + "otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat"),
+ NO_VALID_PARTN(10056, "The query does not reference any valid partition. "
+ + "To run this query, set hive.mapred.mode=nonstrict"),
+ NO_OUTER_MAPJOIN(10057, "MAPJOIN cannot be performed with OUTER JOIN"),
+ INVALID_MAPJOIN_HINT(10058, "All tables are specified as map-table for join"),
+ INVALID_MAPJOIN_TABLE(10059, "Result of a union cannot be a map table"),
+ NON_BUCKETED_TABLE(10060, "Sampling expression needed for non-bucketed table"),
+ BUCKETED_NUMERATOR_BIGGER_DENOMINATOR(10061, "Numerator should not be bigger than "
+ + "denominator in sample clause for table"),
+ NEED_PARTITION_ERROR(10062, "Need to specify partition columns because the destination "
+ + "table is partitioned"),
+ CTAS_CTLT_COEXISTENCE(10063, "Create table command does not allow LIKE and AS-SELECT in "
+ + "the same command"),
+ LINES_TERMINATED_BY_NON_NEWLINE(10064, "LINES TERMINATED BY only supports "
+ + "newline '\\n' right now"),
+ CTAS_COLLST_COEXISTENCE(10065, "CREATE TABLE AS SELECT command cannot specify "
+ + "the list of columns "
+ + "for the target table"),
+ CTLT_COLLST_COEXISTENCE(10066, "CREATE TABLE LIKE command cannot specify the list of columns for "
+ + "the target table"),
+ INVALID_SELECT_SCHEMA(10067, "Cannot derive schema from the select-clause"),
+ CTAS_PARCOL_COEXISTENCE(10068, "CREATE-TABLE-AS-SELECT does not support "
+ + "partitioning in the target table "),
+ CTAS_MULTI_LOADFILE(10069, "CREATE-TABLE-AS-SELECT results in multiple file load"),
+ CTAS_EXTTBL_COEXISTENCE(10070, "CREATE-TABLE-AS-SELECT cannot create external table"),
+ INSERT_EXTERNAL_TABLE(10071, "Inserting into a external table is not allowed"),
+ DATABASE_NOT_EXISTS(10072, "Database does not exist:"),
+ TABLE_ALREADY_EXISTS(10073, "Table already exists:", "42S02"),
+ COLUMN_ALIAS_ALREADY_EXISTS(10074, "Column alias already exists:", "42S02"),
+ UDTF_MULTIPLE_EXPR(10075, "Only a single expression in the SELECT clause is "
+ + "supported with UDTF's"),
+ @Deprecated UDTF_REQUIRE_AS(10076, "UDTF's require an AS clause"),
+ UDTF_NO_GROUP_BY(10077, "GROUP BY is not supported with a UDTF in the SELECT clause"),
+ UDTF_NO_SORT_BY(10078, "SORT BY is not supported with a UDTF in the SELECT clause"),
+ UDTF_NO_CLUSTER_BY(10079, "CLUSTER BY is not supported with a UDTF in the SELECT clause"),
+ UDTF_NO_DISTRIBUTE_BY(10080, "DISTRIBUTE BY is not supported with a UDTF in the SELECT clause"),
+ UDTF_INVALID_LOCATION(10081, "UDTF's are not supported outside the SELECT clause, nor nested "
+ + "in expressions"),
+ UDTF_LATERAL_VIEW(10082, "UDTF's cannot be in a select expression when there is a lateral view"),
+ UDTF_ALIAS_MISMATCH(10083, "The number of aliases supplied in the AS clause does not match the "
+ + "number of columns output by the UDTF"),
+ UDF_STATEFUL_INVALID_LOCATION(10084, "Stateful UDF's can only be invoked in the SELECT list"),
+ LATERAL_VIEW_WITH_JOIN(10085, "JOIN with a LATERAL VIEW is not supported"),
+ LATERAL_VIEW_INVALID_CHILD(10086, "LATERAL VIEW AST with invalid child"),
+ OUTPUT_SPECIFIED_MULTIPLE_TIMES(10087, "The same output cannot be present multiple times: "),
+ INVALID_AS(10088, "AS clause has an invalid number of aliases"),
+ VIEW_COL_MISMATCH(10089, "The number of columns produced by the SELECT clause does not match the "
+ + "number of column names specified by CREATE VIEW"),
+ DML_AGAINST_VIEW(10090, "A view cannot be used as target table for LOAD or INSERT"),
+ ANALYZE_VIEW(10091, "ANALYZE is not supported for views"),
+ VIEW_PARTITION_TOTAL(10092, "At least one non-partitioning column must be present in view"),
+ VIEW_PARTITION_MISMATCH(10093, "Rightmost columns in view output do not match "
+ + "PARTITIONED ON clause"),
+ PARTITION_DYN_STA_ORDER(10094, "Dynamic partition cannot be the parent of a static partition"),
+ DYNAMIC_PARTITION_DISABLED(10095, "Dynamic partition is disabled. Either enable it by setting "
+ + "hive.exec.dynamic.partition=true or specify partition column values"),
+ DYNAMIC_PARTITION_STRICT_MODE(10096, "Dynamic partition strict mode requires at least one "
+ + "static partition column. To turn this off set hive.exec.dynamic.partition.mode=nonstrict"),
+ NONEXISTPARTCOL(10098, "Non-Partition column appears in the partition specification: "),
+ UNSUPPORTED_TYPE(10099, "DATETIME type isn't supported yet. Please use "
+ + "DATE or TIMESTAMP instead"),
+ CREATE_NON_NATIVE_AS(10100, "CREATE TABLE AS SELECT cannot be used for a non-native table"),
+ LOAD_INTO_NON_NATIVE(10101, "A non-native table cannot be used as target for LOAD"),
+ LOCKMGR_NOT_SPECIFIED(10102, "Lock manager not specified correctly, set hive.lock.manager"),
+ LOCKMGR_NOT_INITIALIZED(10103, "Lock manager could not be initialized, check hive.lock.manager "),
+ LOCK_CANNOT_BE_ACQUIRED(10104, "Locks on the underlying objects cannot be acquired. "
+ + "retry after some time"),
+ ZOOKEEPER_CLIENT_COULD_NOT_BE_INITIALIZED(10105, "Check hive.zookeeper.quorum "
+ + "and hive.zookeeper.client.port"),
+ OVERWRITE_ARCHIVED_PART(10106, "Cannot overwrite an archived partition. " +
+ "Unarchive before running this command"),
+ ARCHIVE_METHODS_DISABLED(10107, "Archiving methods are currently disabled. " +
+ "Please see the Hive wiki for more information about enabling archiving"),
+ ARCHIVE_ON_MULI_PARTS(10108, "ARCHIVE can only be run on a single partition"),
+ UNARCHIVE_ON_MULI_PARTS(10109, "UNARCHIVE can only be run on a single partition"),
+ ARCHIVE_ON_TABLE(10110, "ARCHIVE can only be run on partitions"),
+ RESERVED_PART_VAL(10111, "Partition value contains a reserved substring"),
+ HOLD_DDLTIME_ON_NONEXIST_PARTITIONS(10112, "HOLD_DDLTIME hint cannot be applied to dynamic " +
+ "partitions or non-existent partitions"),
+ OFFLINE_TABLE_OR_PARTITION(10113, "Query against an offline table or partition"),
+ OUTERJOIN_USES_FILTERS(10114, "The query results could be wrong. " +
+ "Turn on hive.outerjoin.supports.filters"),
+ NEED_PARTITION_SPECIFICATION(10115, "Table is partitioned and partition specification is needed"),
+ INVALID_METADATA(10116, "The metadata file could not be parsed "),
+ NEED_TABLE_SPECIFICATION(10117, "Table name could not be determined; it should be specified "),
+ PARTITION_EXISTS(10118, "Partition already exists"),
+ TABLE_DATA_EXISTS(10119, "Table exists and contains data files"),
+ INCOMPATIBLE_SCHEMA(10120, "The existing table is not compatible with the import spec. "),
+ EXIM_FOR_NON_NATIVE(10121, "Export/Import cannot be done for a non-native table. "),
+ INSERT_INTO_BUCKETIZED_TABLE(10122, "Bucketized tables do not support INSERT INTO:"),
+ NO_COMPARE_BIGINT_STRING(10123, "In strict mode, comparing bigints and strings is not allowed, "
+ + "it may result in a loss of precision. "
+ + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"),
+ NO_COMPARE_BIGINT_DOUBLE(10124, "In strict mode, comparing bigints and doubles is not allowed, "
+ + "it may result in a loss of precision. "
+ + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"),
+ PARTSPEC_DIFFER_FROM_SCHEMA(10125, "Partition columns in partition specification are "
+ + "not the same as that defined in the table schema. "
+ + "The names and orders have to be exactly the same."),
+ PARTITION_COLUMN_NON_PRIMITIVE(10126, "Partition column must be of primitive type."),
+ INSERT_INTO_DYNAMICPARTITION_IFNOTEXISTS(10127,
+ "Dynamic partitions do not support IF NOT EXISTS. Specified partitions with value :"),
+ UDAF_INVALID_LOCATION(10128, "Not yet supported place for UDAF"),
+ DROP_PARTITION_NON_STRING_PARTCOLS_NONEQUALITY(10129,
+ "Drop partitions for a non string partition columns is not allowed using non-equality"),
+ ALTER_COMMAND_FOR_VIEWS(10131, "To alter a view you need to use the ALTER VIEW command."),
+ ALTER_COMMAND_FOR_TABLES(10132, "To alter a base table you need to use the ALTER TABLE command."),
+ ALTER_VIEW_DISALLOWED_OP(10133, "Cannot use this form of ALTER on a view"),
+ ALTER_TABLE_NON_NATIVE(10134, "ALTER TABLE cannot be used for a non-native table"),
+ SORTMERGE_MAPJOIN_FAILED(10135,
+ "Sort merge bucketed join could not be performed. " +
+ "If you really want to perform the operation, either set " +
+ "hive.optimize.bucketmapjoin.sortedmerge=false, or set " +
+ "hive.enforce.sortmergebucketmapjoin=false."),
+ BUCKET_MAPJOIN_NOT_POSSIBLE(10136,
+ "Bucketed mapjoin cannot be performed. " +
+ "This can be due to multiple reasons: " +
+ " . Join columns dont match bucketed columns. " +
+ " . Number of buckets are not a multiple of each other. " +
+ "If you really want to perform the operation, either remove the " +
+ "mapjoin hint from your query or set hive.enforce.bucketmapjoin to false."),
+
+ BUCKETED_TABLE_METADATA_INCORRECT(10141,
+ "Bucketed table metadata is not correct. " +
+ "Fix the metadata or don't use bucketed mapjoin, by setting " +
+ "hive.enforce.bucketmapjoin to false."),
+
+ JOINNODE_OUTERJOIN_MORETHAN_16(10142, "Single join node containing outer join(s) " +
+ "cannot have more than 16 aliases"),
+
+ INVALID_JDO_FILTER_EXPRESSION(10143, "Invalid expression for JDO filter"),
+
+ SHOW_CREATETABLE_INDEX(10144, "SHOW CREATE TABLE does not support tables of type INDEX_TABLE."),
+ ALTER_BUCKETNUM_NONBUCKETIZED_TBL(10145, "Table is not bucketized."),
+
+ TRUNCATE_FOR_NON_MANAGED_TABLE(10146, "Cannot truncate non-managed table {0}.", true),
+ TRUNCATE_FOR_NON_NATIVE_TABLE(10147, "Cannot truncate non-native table {0}.", true),
+ PARTSPEC_FOR_NON_PARTITIONED_TABLE(10148, "Partition spec for non partitioned table {0}.", true),
+
+ LOAD_INTO_STORED_AS_DIR(10195, "A stored-as-directories table cannot be used as target for LOAD"),
+ ALTER_TBL_STOREDASDIR_NOT_SKEWED(10196, "This operation is only valid on skewed table."),
+ ALTER_TBL_SKEWED_LOC_NO_LOC(10197, "Alter table skewed location doesn't have locations."),
+ ALTER_TBL_SKEWED_LOC_NO_MAP(10198, "Alter table skewed location doesn't have location map."),
+ SUPPORT_DIR_MUST_TRUE_FOR_LIST_BUCKETING(
+ 10199,
+ "hive.mapred.supports.subdirectories must be true"
+ + " if any one of following is true: "
+ + " hive.optimize.listbucketing , mapred.input.dir.recursive"
+ + " and hive.optimize.union.remove."),
+ SKEWED_TABLE_NO_COLUMN_NAME(10200, "No skewed column name."),
+ SKEWED_TABLE_NO_COLUMN_VALUE(10201, "No skewed values."),
+ SKEWED_TABLE_DUPLICATE_COLUMN_NAMES(10202,
+ "Duplicate skewed column name:"),
+ SKEWED_TABLE_INVALID_COLUMN(10203,
+ "Invalid skewed column name:"),
+ SKEWED_TABLE_SKEWED_COL_NAME_VALUE_MISMATCH_1(10204,
+ "Skewed column name is empty but skewed value is not."),
+ SKEWED_TABLE_SKEWED_COL_NAME_VALUE_MISMATCH_2(10205,
+ "Skewed column value is empty but skewed name is not."),
+ SKEWED_TABLE_SKEWED_COL_NAME_VALUE_MISMATCH_3(10206,
+ "The number of skewed column names and the number of " +
+ "skewed column values are different: "),
+ ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN(10207,
+ " is a skewed column. It's not allowed to rename skewed column"
+ + " or change skewed column type."),
+ HIVE_GROUPING_SETS_AGGR_NOMAPAGGR(10209,
+ "Grouping sets aggregations (with rollups or cubes) are not allowed if map-side " +
+ " aggregation is turned off. Set hive.map.aggr=true if you want to use grouping sets"),
+ HIVE_GROUPING_SETS_AGGR_EXPRESSION_INVALID(10210,
+ "Grouping sets aggregations (with rollups or cubes) are not allowed if aggregation function " +
+ "parameters overlap with the aggregation functions columns"),
+
+ HIVE_GROUPING_SETS_AGGR_NOFUNC(10211,
+ "Grouping sets aggregations are not allowed if no aggregation function is presented"),
+
+ HIVE_UNION_REMOVE_OPTIMIZATION_NEEDS_SUBDIRECTORIES(10212,
+ "In order to use hive.optimize.union.remove, the hadoop version that you are using " +
+ "should support sub-directories for tables/partitions. If that is true, set " +
+ "hive.hadoop.supports.subdirectories to true. Otherwise, set hive.optimize.union.remove " +
+ "to false"),
+
+ HIVE_GROUPING_SETS_EXPR_NOT_IN_GROUPBY(10213,
+ "Grouping sets expression is not in GROUP BY key"),
+ INVALID_PARTITION_SPEC(10214, "Invalid partition spec specified"),
+ ALTER_TBL_UNSET_NON_EXIST_PROPERTY(10215,
+ "Please use the following syntax if not sure " +
+ "whether the property existed or not:\n" +
+ "ALTER TABLE tableName UNSET TBLPROPERTIES IF EXISTS (key1, key2, ...)\n"),
+ ALTER_VIEW_AS_SELECT_NOT_EXIST(10216,
+ "Cannot ALTER VIEW AS SELECT if view currently does not exist\n"),
+ REPLACE_VIEW_WITH_PARTITION(10217,
+ "Cannot replace a view with CREATE VIEW or REPLACE VIEW or " +
+ "ALTER VIEW AS SELECT if the view has partitions\n"),
+ EXISTING_TABLE_IS_NOT_VIEW(10218,
+ "Existing table is not a view\n"),
+ NO_SUPPORTED_ORDERBY_ALLCOLREF_POS(10219,
+ "Position in ORDER BY is not supported when using SELECT *"),
+ INVALID_POSITION_ALIAS_IN_GROUPBY(10220,
+ "Invalid position alias in Group By\n"),
+ INVALID_POSITION_ALIAS_IN_ORDERBY(10221,
+ "Invalid position alias in Order By\n"),
+
+ HIVE_GROUPING_SETS_THRESHOLD_NOT_ALLOWED_WITH_SKEW(10225,
+ "An additional MR job is introduced since the number of rows created per input row " +
+ "due to grouping sets is more than hive.new.job.grouping.set.cardinality. There is no need " +
+ "to handle skew separately. set hive.groupby.skewindata to false."),
+ HIVE_GROUPING_SETS_THRESHOLD_NOT_ALLOWED_WITH_DISTINCTS(10226,
+ "An additional MR job is introduced since the cardinality of grouping sets " +
+ "is more than hive.new.job.grouping.set.cardinality. This functionality is not supported " +
+ "with distincts. Either set hive.new.job.grouping.set.cardinality to a high number " +
+ "(higher than the number of rows per input row due to grouping sets in the query), or " +
+ "rewrite the query to not use distincts."),
+
+ OPERATOR_NOT_ALLOWED_WITH_MAPJOIN(10227,
+ "Not all clauses are supported with mapjoin hint. Please remove mapjoin hint."),
+
+ ANALYZE_TABLE_NOSCAN_NON_NATIVE(10228, "ANALYZE TABLE NOSCAN cannot be used for "
+ + "a non-native table"),
+
+ ANALYZE_TABLE_PARTIALSCAN_NON_NATIVE(10229, "ANALYZE TABLE PARTIALSCAN cannot be used for "
+ + "a non-native table"),
+ ANALYZE_TABLE_PARTIALSCAN_NON_RCFILE(10230, "ANALYZE TABLE PARTIALSCAN doesn't "
+ + "support non-RCfile. "),
+ ANALYZE_TABLE_PARTIALSCAN_EXTERNAL_TABLE(10231, "ANALYZE TABLE PARTIALSCAN "
+ + "doesn't support external table: "),
+ ANALYZE_TABLE_PARTIALSCAN_AGGKEY(10232, "Analyze partialscan command "
+ + "fails to construct aggregation for the partition "),
+ ANALYZE_TABLE_PARTIALSCAN_AUTOGATHER(10233, "Analyze partialscan is not allowed " +
+ "if hive.stats.autogather is set to false"),
+ PARTITION_VALUE_NOT_CONTINUOUS(10234, "Partition values specified are not continuous." +
+ " A subpartition value is specified without specifying the parent partition's value"),
+ TABLES_INCOMPATIBLE_SCHEMAS(10235, "Tables have incompatible schemas and their partitions " +
+ " cannot be exchanged."),
+
+ TRUNCATE_COLUMN_INDEXED_TABLE(10236, "Can not truncate columns from table with indexes"),
+ TRUNCATE_COLUMN_NOT_RC(10237, "Only RCFileFormat supports column truncation."),
+ TRUNCATE_COLUMN_ARCHIVED(10238, "Column truncation cannot be performed on archived partitions."),
+ TRUNCATE_BUCKETED_COLUMN(10239,
+ "A column on which a partition/table is bucketed cannot be truncated."),
+ TRUNCATE_LIST_BUCKETED_COLUMN(10240,
+ "A column on which a partition/table is list bucketed cannot be truncated."),
+
+ TABLE_NOT_PARTITIONED(10241, "Table {0} is not a partitioned table", true),
+ DATABSAE_ALREADY_EXISTS(10242, "Database {0} already exists", true),
+ CANNOT_REPLACE_COLUMNS(10243, "Replace columns is not supported for table {0}. SerDe may be incompatible.", true),
+ BAD_LOCATION_VALUE(10244, "{0} is not absolute or has no scheme information. Please specify a complete absolute uri with scheme information."),
+ UNSUPPORTED_ALTER_TBL_OP(10245, "{0} alter table option is not supported"),
+ INVALID_BIGTABLE_MAPJOIN(10246, "{0} table chosen for streaming is not valid", true),
+ MISSING_OVER_CLAUSE(10247, "Missing over clause for function : "),
+ PARTITION_SPEC_TYPE_MISMATCH(10248, "Cannot add partition column {0} of type {1} as it cannot be converted to type {2}", true),
+ UNSUPPORTED_SUBQUERY_EXPRESSION(10249, "Unsupported SubQuery Expression"),
+ INVALID_SUBQUERY_EXPRESSION(10250, "Invalid SubQuery expression"),
+
+ INVALID_HDFS_URI(10251, "{0} is not a hdfs uri", true),
+ INVALID_DIR(10252, "{0} is not a directory", true),
+ NO_VALID_LOCATIONS(10253, "Could not find any valid location to place the jars. " +
+ "Please update hive.jar.directory or hive.user.install.directory with a valid location", false),
+ UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP(10254,
+ "Principal type GROUP is not supported in this authorization setting", "28000"),
+ INVALID_TABLE_NAME(10255, "Invalid table name {0}", true),
+
+ //========================== 20000 range starts here ========================//
+ SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."),
+ SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. "
+ + "It may have crashed with an error."),
+ SCRIPT_GENERIC_ERROR(20002, "Hive encountered some unknown error while "
+ + "running your custom script."),
+ SCRIPT_CLOSING_ERROR(20003, "An error occurred when trying to close the Operator " +
+ "running your custom script."),
+ DYNAMIC_PARTITIONS_TOO_MANY_PER_NODE_ERROR(20004, "Fatal error occurred when node " +
+ "tried to create too many dynamic partitions. The maximum number of dynamic partitions " +
+ "is controlled by hive.exec.max.dynamic.partitions and hive.exec.max.dynamic.partitions.pernode. "),
+
+ //========================== 30000 range starts here ========================//
+ STATSPUBLISHER_NOT_OBTAINED(30000, "StatsPublisher cannot be obtained. " +
+ "There was an error retrieving the StatsPublisher, and retrying " +
+ "might help. If you don't want the query to fail because accurate statistics " +
+ "could not be collected, set hive.stats.reliable=false"),
+ STATSPUBLISHER_INITIALIZATION_ERROR(30001, "StatsPublisher cannot be initialized. " +
+ "There was an error in the initialization of StatsPublisher, and retrying " +
+ "might help. If you don't want the query to fail because accurate statistics " +
+ "could not be collected, set hive.stats.reliable=false"),
+ STATSPUBLISHER_CONNECTION_ERROR(30002, "StatsPublisher cannot be connected to. " +
+ "There was an error while connecting to the StatsPublisher, and retrying " +
+ "might help. If you don't want the query to fail because accurate statistics " +
+ "could not be collected, set hive.stats.reliable=false"),
+ STATSPUBLISHER_PUBLISHING_ERROR(30003, "Error in publishing stats. There was an " +
+ "error in publishing stats via StatsPublisher, and retrying " +
+ "might help. If you don't want the query to fail because accurate statistics " +
+ "could not be collected, set hive.stats.reliable=false"),
+ STATSPUBLISHER_CLOSING_ERROR(30004, "StatsPublisher cannot be closed. " +
+ "There was an error while closing the StatsPublisher, and retrying " +
+ "might help. If you don't want the query to fail because accurate statistics " +
+ "could not be collected, set hive.stats.reliable=false"),
+
+ COLUMNSTATSCOLLECTOR_INVALID_PART_KEY(30005, "Invalid partitioning key specified in ANALYZE " +
+ "statement"),
+ COLUMNSTATSCOLLECTOR_INCORRECT_NUM_PART_KEY(30006, "Incorrect number of partitioning key " +
+ "specified in ANALYZE statement"),
+ COLUMNSTATSCOLLECTOR_INVALID_PARTITION(30007, "Invalid partitioning key/value specified in " +
+ "ANALYZE statement"),
+ COLUMNSTATSCOLLECTOR_INVALID_SYNTAX(30008, "Dynamic partitioning is not supported yet while " +
+ "gathering column statistics through ANALYZE statement"),
+ COLUMNSTATSCOLLECTOR_PARSE_ERROR(30009, "Encountered parse error while parsing rewritten query"),
+ COLUMNSTATSCOLLECTOR_IO_ERROR(30010, "Encountered I/O exception while parsing rewritten query"),
+ DROP_COMMAND_NOT_ALLOWED_FOR_PARTITION(30011, "Partition protected from being dropped"),
+ COLUMNSTATSCOLLECTOR_INVALID_COLUMN(30012, "Column statistics are not supported "
+ + "for partition columns"),
+
+ STATISTICS_CLONING_FAILED(30013, "Cloning of statistics failed"),
+
+ STATSAGGREGATOR_SOURCETASK_NULL(30014, "SourceTask of StatsTask should not be null"),
+ STATSAGGREGATOR_CONNECTION_ERROR(30015,
+ "Stats aggregator of type {0} cannot be connected to", true),
+ STATSAGGREGATOR_MISSED_SOMESTATS(30016,
+ "Stats type {0} is missing from stats aggregator. If you don't want the query " +
+ "to fail because of this, set hive.stats.atomic=false", true),
+ STATS_SKIPPING_BY_ERROR(30017, "Skipping stats aggregation by error {0}", true);
+
+ private int errorCode;
+ private String mesg;
+ private String sqlState;
+ private MessageFormat format;
+
+ private static final char SPACE = ' ';
+ private static final Pattern ERROR_MESSAGE_PATTERN = Pattern.compile(".*Line [0-9]+:[0-9]+ (.*)");
+ private static final Pattern ERROR_CODE_PATTERN =
+ Pattern.compile("HiveException:\\s+\\[Error ([0-9]+)\\]: (.*)");
+ private static Map<String, ErrorMsg> mesgToErrorMsgMap = new HashMap<String, ErrorMsg>();
+ private static Map<Pattern, ErrorMsg> formatToErrorMsgMap = new HashMap<Pattern, ErrorMsg>();
+ private static int minMesgLength = -1;
+
+ static {
+ for (ErrorMsg errorMsg : values()) {
+ if (errorMsg.format != null) {
+ String pattern = errorMsg.mesg.replaceAll("\\{.*\\}", ".*");
+ formatToErrorMsgMap.put(Pattern.compile("^" + pattern + "$"), errorMsg);
+ } else {
+ mesgToErrorMsgMap.put(errorMsg.getMsg().trim(), errorMsg);
+ int length = errorMsg.getMsg().trim().length();
+ if (minMesgLength == -1 || length < minMesgLength) {
+ minMesgLength = length;
+ }
+ }
+ }
+ }
+
+ /**
+ * Given an error message string, returns the ErrorMsg object associated with it.
+ * @param mesg An error message string
+ * @return ErrorMsg
+ */
+ public static ErrorMsg getErrorMsg(String mesg) {
+ if (mesg == null) {
+ return GENERIC_ERROR;
+ }
+
+ // first see if there is a direct match
+ ErrorMsg errorMsg = mesgToErrorMsgMap.get(mesg);
+ if (errorMsg != null) {
+ return errorMsg;
+ }
+
+ for (Map.Entry<Pattern, ErrorMsg> entry : formatToErrorMsgMap.entrySet()) {
+ if (entry.getKey().matcher(mesg).matches()) {
+ return entry.getValue();
+ }
+ }
+
+ // if not, see if the mesg follows the format typically generated by the
+ // compiler, e.g.:
+ // line 1:14 Table not found table_name
+ String truncatedMesg = mesg.trim();
+ Matcher match = ERROR_MESSAGE_PATTERN.matcher(mesg);
+ if (match.matches()) {
+ truncatedMesg = match.group(1);
+ }
+
+ // appends might exist after the root message, so strip tokens off until we
+ // match
+ while (truncatedMesg.length() > minMesgLength) {
+ errorMsg = mesgToErrorMsgMap.get(truncatedMesg.trim());
+ if (errorMsg != null) {
+ return errorMsg;
+ }
+
+ int lastSpace = truncatedMesg.lastIndexOf(SPACE);
+ if (lastSpace == -1) {
+ break;
+ }
+
+ // hack off the last word and try again
+ truncatedMesg = truncatedMesg.substring(0, lastSpace).trim();
+ }
+
+ return GENERIC_ERROR;
+ }
+
+ /**
+ * Given an error code, returns the ErrorMsg object associated with it.
+ * @param errorCode An error code
+ * @return ErrorMsg
+ */
+ public static ErrorMsg getErrorMsg(int errorCode) {
+ for (ErrorMsg errorMsg : values()) {
+ if (errorMsg.getErrorCode() == errorCode) {
+ return errorMsg;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * For a given error message string, searches for a ErrorMsg enum
+ * that appears to be a match. If a match is found, returns the
+ * SQLState associated with the ErrorMsg. If a match
+ * is not found or ErrorMsg has no SQLState, returns
+ * the SQLState bound to the GENERIC_ERROR
+ * ErrorMsg.
+ *
+ * @param mesg
+ * An error message string
+ * @return SQLState
+ */
+ public static String findSQLState(String mesg) {
+ ErrorMsg error = getErrorMsg(mesg);
+ return error.getSQLState();
+ }
+
+ private ErrorMsg(int errorCode, String mesg) {
+ this(errorCode, mesg, "42000", false);
+ }
+
+ private ErrorMsg(int errorCode, String mesg, boolean format) {
+ // 42000 is the generic SQLState for syntax error.
+ this(errorCode, mesg, "42000", format);
+ }
+
+ private ErrorMsg(int errorCode, String mesg, String sqlState) {
+ this(errorCode, mesg, sqlState, false);
+ }
+
+ private ErrorMsg(int errorCode, String mesg, String sqlState, boolean format) {
+ this.errorCode = errorCode;
+ this.mesg = mesg;
+ this.sqlState = sqlState;
+ this.format = format ? new MessageFormat(mesg) : null;
+ }
+
+ private static int getLine(ASTNode tree) {
+ if (tree.getChildCount() == 0) {
+ return tree.getToken().getLine();
+ }
+
+ return getLine((ASTNode) tree.getChild(0));
+ }
+
+ private static int getCharPositionInLine(ASTNode tree) {
+ if (tree.getChildCount() == 0) {
+ return tree.getToken().getCharPositionInLine();
+ }
+
+ return getCharPositionInLine((ASTNode) tree.getChild(0));
+ }
+
+ // Dirty hack as this will throw away spaces and other things - find a better
+ // way!
+ public static String getText(ASTNode tree) {
+ if (tree.getChildCount() == 0) {
+ return tree.getText();
+ }
+ return getText((ASTNode) tree.getChild(tree.getChildCount() - 1));
+ }
+
+ public String getMsg(ASTNode tree) {
+ StringBuilder sb = new StringBuilder();
+ renderPosition(sb, tree);
+ sb.append(" ");
+ sb.append(mesg);
+ sb.append(" '");
+ sb.append(getText(tree));
+ sb.append("'");
+ renderOrigin(sb, tree.getOrigin());
+ return sb.toString();
+ }
+
+ public static void renderOrigin(StringBuilder sb, ASTNodeOrigin origin) {
+ while (origin != null) {
+ sb.append(" in definition of ");
+ sb.append(origin.getObjectType());
+ sb.append(" ");
+ sb.append(origin.getObjectName());
+ sb.append(" [");
+ sb.append(HiveStringUtils.LINE_SEP);
+ sb.append(origin.getObjectDefinition());
+ sb.append(HiveStringUtils.LINE_SEP);
+ sb.append("] used as ");
+ sb.append(origin.getUsageAlias());
+ sb.append(" at ");
+ ASTNode usageNode = origin.getUsageNode();
+ renderPosition(sb, usageNode);
+ origin = usageNode.getOrigin();
+ }
+ }
+
+ private static void renderPosition(StringBuilder sb, ASTNode tree) {
+ sb.append("Line ");
+ sb.append(getLine(tree));
+ sb.append(":");
+ sb.append(getCharPositionInLine(tree));
+ }
+
+ public String getMsg(Tree tree) {
+ return getMsg((ASTNode) tree);
+ }
+
+ public String getMsg(ASTNode tree, String reason) {
+ return getMsg(tree) + ": " + reason;
+ }
+
+ public String getMsg(Tree tree, String reason) {
+ return getMsg((ASTNode) tree, reason);
+ }
+
+ public String getMsg(String reason) {
+ return mesg + " " + reason;
+ }
+
+ public String format(String reason) {
+ return format(new String[]{reason});
+ }
+ /**
+ * If the message is parametrized, this will fill the parameters with supplied
+ * {@code reasons}, otherwise {@code reasons} are appended at the end of the
+ * message.
+ */
+ public String format(String... reasons) {
+ /* Not all messages are parametrized, even those that should have been, e.g. {@link #INVALID_TABLE}.
+ INVALID_TABLE is usually used with {@link #getMsg(String)}.
+ This method can also be used with INVALID_TABLE and the like and will match getMsg(String) behavior.
+
+ Another example: {@link #INVALID_PARTITION}. Ideally you want the message to have 2 parameters, one for
+ the partition name and one for the table name. Since this is already defined w/o any parameters, one can
+ still call {@code INVALID_PARTITION.format("<partName> <table_name>")}. This way the message text will be
+ slightly different, but at least the errorCode will match. Note: this should not be abused by adding
+ anything other than what should have been parameter names, to keep msg text standardized.
+ */
+ if(reasons == null || reasons.length == 0) {
+ return getMsg();
+ }
+ if(format != null) {
+ return format.format(reasons);
+ }
+ if(reasons.length > 1) {
+ StringBuilder sb = new StringBuilder();
+ for(String re : reasons) {
+ if(re != null) {
+ if(sb.length() > 0) {
+ sb.append(" ");
+ }
+ sb.append(re);
+ }
+ }
+ return getMsg(sb.toString());
+ }
+ return getMsg(reasons[0]);
+ }
+
+ public String getErrorCodedMsg() {
+ return "[Error " + errorCode + "]: " + mesg;
+ }
+
+ public String getErrorCodedMsg(String... reasons) {
+ return "[Error " + errorCode + "]: " + format(reasons);
+ }
+
+ public static Pattern getErrorCodePattern() {
+ return ERROR_CODE_PATTERN;
+ }
+
+ public String getMsg() {
+ return mesg;
+ }
+
+ public String getSQLState() {
+ return sqlState;
+ }
+
+ public int getErrorCode() {
+ return errorCode;
+ }
+}
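
Two usage sketches for the enum above: filling a parametrized message, and mapping an error string back to its canonical entry:

    // MessageFormat-backed message (TABLE_NOT_PARTITIONED is declared with format=true):
    String msg = ErrorMsg.TABLE_NOT_PARTITIONED.format("page_view");
    // -> "Table page_view is not a partitioned table"

    // Reverse lookup strips trailing appended tokens until a known message matches:
    ErrorMsg canonical = ErrorMsg.getErrorMsg("Table not found page_view");
    // -> ErrorMsg.INVALID_TABLE (code 10001, SQLState 42S02)
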
Index: common/src/java/org/apache/hive/exception/HiveAuthorizationPluginException.java
===================================================================
--- common/src/java/org/apache/hive/exception/HiveAuthorizationPluginException.java (revision 0)
+++ common/src/java/org/apache/hive/exception/HiveAuthorizationPluginException.java (revision 0)
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.exception;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
+
+/**
+ * Exception thrown by the Authorization plugin api (v2)
+ */
+@Public
+public class HiveAuthorizationPluginException extends HiveException{
+
+ private static final long serialVersionUID = 1L;
+
+ public HiveAuthorizationPluginException(){
+ }
+
+ public HiveAuthorizationPluginException(String msg){
+ super(msg);
+ }
+
+ public HiveAuthorizationPluginException(String msg, Throwable cause){
+ super(msg, cause);
+ }
+
+ public HiveAuthorizationPluginException(Throwable cause){
+ super(cause);
+ }
+
+}
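
A sketch of how a plugin implementation might surface a denial through this exception (the names and the policy check are illustrative):

    void requireSelect(String user, String table)
        throws HiveAuthorizationPluginException {
      boolean allowed = false; // stand-in for a real policy lookup
      if (!allowed) {
        throw new HiveAuthorizationPluginException(
            "Permission denied: user " + user
            + " has no SELECT privilege on " + table);
      }
    }
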
Index: common/src/java/org/apache/hive/exception/HiveException.java
===================================================================
--- common/src/java/org/apache/hive/exception/HiveException.java (revision 0)
+++ common/src/java/org/apache/hive/exception/HiveException.java (revision 0)
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.exception;
+
+
+/**
+ * Generic exception class for Hive.
+ */
+
+public class HiveException extends Exception {
+ /**
+ * Standard predefined message with error code and possibly SQL State, etc.
+ */
+ private ErrorMsg canonicalErrorMsg = ErrorMsg.GENERIC_ERROR;
+ public HiveException() {
+ super();
+ }
+
+ public HiveException(String message) {
+ super(message);
+ }
+
+ public HiveException(Throwable cause) {
+ super(cause);
+ }
+
+ public HiveException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public HiveException(ErrorMsg message, String... msgArgs) {
+ this(null, message, msgArgs);
+ }
+
+ /**
+ * This is the recommended constructor to use since it helps use
+ * canonical messages throughout.
+ * @param errorMsg Canonical error message
+ * @param msgArgs message arguments if the message is parametrized; must be {@code null} if the message takes no arguments
+ */
+ public HiveException(Throwable cause, ErrorMsg errorMsg, String... msgArgs) {
+ super(errorMsg.format(msgArgs), cause);
+ canonicalErrorMsg = errorMsg;
+
+ }
+ /**
+ * @return {@link ErrorMsg#GENERIC_ERROR} by default
+ */
+ public ErrorMsg getCanonicalErrorMsg() {
+ return canonicalErrorMsg;
+ }
+}
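
A sketch of the recommended constructor in use, showing that callers can recover the canonical code:

    try {
      throw new HiveException(ErrorMsg.INVALID_TABLE_NAME, "db..tbl");
    } catch (HiveException e) {
      System.out.println(e.getMessage());                          // Invalid table name db..tbl
      System.out.println(e.getCanonicalErrorMsg().getErrorCode()); // 10255
    }
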
Index: common/pom.xml
===================================================================
--- common/pom.xml (revision 1560846)
+++ common/pom.xml (working copy)
@@ -41,6 +41,16 @@
+    <dependency>
+      <groupId>org.antlr</groupId>
+      <artifactId>antlr-runtime</artifactId>
+      <version>${antlr.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.antlr</groupId>
+      <artifactId>ST4</artifactId>
+      <version>${ST4.version}</version>
+    </dependency>
     <dependency>
       <groupId>commons-cli</groupId>
       <artifactId>commons-cli</artifactId>
       <version>${commons-cli.version}</version>
Index: contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java
===================================================================
--- contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java (revision 1560846)
+++ contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java (working copy)
@@ -23,12 +23,12 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDTFExplode2.
Index: contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
===================================================================
--- contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java (revision 1560846)
+++ contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java (working copy)
@@ -21,12 +21,12 @@
import java.util.ArrayList;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
Index: contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java
===================================================================
--- contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java (revision 1560846)
+++ contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java (working copy)
@@ -26,7 +26,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFDBOutput is designed to output data directly from Hive to a JDBC
Index: service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java
===================================================================
--- service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java (revision 1560846)
+++ service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java (working copy)
@@ -31,7 +31,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hive.exception.ErrorMsg;
import org.apache.hive.service.Service;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.auth.PlainSaslHelper;
Index: service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
===================================================================
--- service/src/test/org/apache/hive/service/cli/CLIServiceTest.java (revision 1560846)
+++ service/src/test/org/apache/hive/service/cli/CLIServiceTest.java (working copy)
@@ -29,7 +29,7 @@
import java.util.Map;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hive.exception.ErrorMsg;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
Index: service/src/java/org/apache/hive/service/cli/CLIService.java
===================================================================
--- service/src/java/org/apache/hive/service/cli/CLIService.java (revision 1560846)
+++ service/src/java/org/apache/hive/service/cli/CLIService.java (working copy)
@@ -33,8 +33,8 @@
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.exception.HiveException;
import org.apache.hive.service.CompositeService;
import org.apache.hive.service.ServiceException;
import org.apache.hive.service.auth.HiveAuthFactory;
Index: service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
===================================================================
--- service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java (revision 1560846)
+++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java (working copy)
@@ -22,9 +22,9 @@
import java.util.Map;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.exception.HiveException;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.thrift.TProtocolVersion;
Index: ql/src/test/results/clientpositive/authorization_1_sql_std.q.out
===================================================================
--- ql/src/test/results/clientpositive/authorization_1_sql_std.q.out (revision 0)
+++ ql/src/test/results/clientpositive/authorization_1_sql_std.q.out (revision 0)
@@ -0,0 +1,93 @@
+PREHOOK: query: create table src_autho_test (key STRING, value STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_autho_test (key STRING, value STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_autho_test
+PREHOOK: query: --table grant to user
+
+grant select on table src_autho_test to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: --table grant to user
+
+grant select on table src_autho_test to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@src_autho_test
+PREHOOK: query: show grant user hive_test_user on table src_autho_test
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user on table src_autho_test
+POSTHOOK: type: SHOW_GRANT
+database default
+table src_autho_test
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+PREHOOK: query: revoke select on table src_autho_test from user hive_test_user
+PREHOOK: type: REVOKE_PRIVILEGE
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: revoke select on table src_autho_test from user hive_test_user
+POSTHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: Output: default@src_autho_test
+PREHOOK: query: show grant user hive_test_user on table src_autho_test
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user on table src_autho_test
+POSTHOOK: type: SHOW_GRANT
+PREHOOK: query: --role
+create role src_role
+PREHOOK: type: CREATEROLE
+POSTHOOK: query: --role
+create role src_role
+POSTHOOK: type: CREATEROLE
+PREHOOK: query: grant role src_role to user hive_test_user
+PREHOOK: type: GRANT_ROLE
+POSTHOOK: query: grant role src_role to user hive_test_user
+POSTHOOK: type: GRANT_ROLE
+PREHOOK: query: show role grant user hive_test_user
+PREHOOK: type: SHOW_ROLE_GRANT
+POSTHOOK: query: show role grant user hive_test_user
+POSTHOOK: type: SHOW_ROLE_GRANT
+src_role
+
+PREHOOK: query: --table grant to role
+
+grant select on table src_autho_test to role src_role
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: --table grant to role
+
+grant select on table src_autho_test to role src_role
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@src_autho_test
+PREHOOK: query: show grant role src_role on table src_autho_test
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant role src_role on table src_autho_test
+POSTHOOK: type: SHOW_GRANT
+database default
+table src_autho_test
+principalName src_role
+principalType ROLE
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+PREHOOK: query: revoke select on table src_autho_test from role src_role
+PREHOOK: type: REVOKE_PRIVILEGE
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: revoke select on table src_autho_test from role src_role
+POSTHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: Output: default@src_autho_test
+PREHOOK: query: -- drop role
+drop role src_role
+PREHOOK: type: DROPROLE
+POSTHOOK: query: -- drop role
+drop role src_role
+POSTHOOK: type: DROPROLE
+PREHOOK: query: drop table src_autho_test
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@src_autho_test
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: drop table src_autho_test
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@src_autho_test
+POSTHOOK: Output: default@src_autho_test
Index: ql/src/test/results/clientpositive/authorization_role_grant1.q.out
===================================================================
--- ql/src/test/results/clientpositive/authorization_role_grant1.q.out (revision 1560846)
+++ ql/src/test/results/clientpositive/authorization_role_grant1.q.out (working copy)
@@ -1,7 +1,11 @@
-PREHOOK: query: -- role granting without role keyword
+PREHOOK: query: -- enable sql standard authorization
+
+-- role granting without role keyword
create role src_role2
PREHOOK: type: CREATEROLE
-POSTHOOK: query: -- role granting without role keyword
+POSTHOOK: query: -- enable sql standard authorization
+
+-- role granting without role keyword
create role src_role2
POSTHOOK: type: CREATEROLE
PREHOOK: query: grant src_role2 to user user2
@@ -13,6 +17,13 @@
POSTHOOK: query: show role grant user user2
POSTHOOK: type: SHOW_ROLE_GRANT
src_role2
+
+PREHOOK: query: show roles
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: show roles
+POSTHOOK: type: SHOW_ROLES
+src_role2
+
PREHOOK: query: -- revoke role without role keyword
revoke src_role2 from user user2
PREHOOK: type: REVOKE_ROLE
@@ -23,6 +34,12 @@
PREHOOK: type: SHOW_ROLE_GRANT
POSTHOOK: query: show role grant user user2
POSTHOOK: type: SHOW_ROLE_GRANT
+PREHOOK: query: show roles
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: show roles
+POSTHOOK: type: SHOW_ROLES
+src_role2
+
PREHOOK: query: ----------------------------------------
-- role granting without role keyword, with admin option (syntax check)
----------------------------------------
@@ -44,6 +61,7 @@
POSTHOOK: query: show role grant user user2
POSTHOOK: type: SHOW_ROLE_GRANT
src_role_wadmin
+
PREHOOK: query: -- revoke role without role keyword
revoke src_role_wadmin from user user2 with admin option
PREHOOK: type: REVOKE_ROLE
@@ -54,3 +72,30 @@
PREHOOK: type: SHOW_ROLE_GRANT
POSTHOOK: query: show role grant user user2
POSTHOOK: type: SHOW_ROLE_GRANT
+PREHOOK: query: -- drop roles
+show roles
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: -- drop roles
+show roles
+POSTHOOK: type: SHOW_ROLES
+src_role2
+src_role_wadmin
+
+PREHOOK: query: drop role src_role2
+PREHOOK: type: DROPROLE
+POSTHOOK: query: drop role src_role2
+POSTHOOK: type: DROPROLE
+PREHOOK: query: show roles
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: show roles
+POSTHOOK: type: SHOW_ROLES
+src_role_wadmin
+
+PREHOOK: query: drop role src_role_wadmin
+PREHOOK: type: DROPROLE
+POSTHOOK: query: drop role src_role_wadmin
+POSTHOOK: type: DROPROLE
+PREHOOK: query: show roles
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: show roles
+POSTHOOK: type: SHOW_ROLES
Index: ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java (working copy)
@@ -36,6 +36,7 @@
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hive.exception.HiveException;
import org.apache.thrift.TException;
/**
Index: ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (working copy)
@@ -49,6 +49,7 @@
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
import org.apache.thrift.protocol.TBinaryProtocol;
/**
Index: ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java (working copy)
@@ -21,6 +21,8 @@
import java.util.HashSet;
import java.util.Set;
+import org.apache.hive.exception.ErrorMsg;
+
import junit.framework.Assert;
import junit.framework.TestCase;
Index: ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java (working copy)
@@ -31,11 +31,11 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFSumLong;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncAbsLongToLong;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.*;
import org.apache.hadoop.hive.ql.udf.generic.*;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.HiveException;
import org.junit.Before;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (working copy)
@@ -22,7 +22,6 @@
import junit.framework.TestCase;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
@@ -38,6 +37,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* TestExpressionEvaluator.
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java (working copy)
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
import org.apache.hadoop.hive.ql.exec.vector.util.VectorizedRowGroupGenUtil;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -53,6 +52,7 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
import org.junit.Test;
/**
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java (working copy)
@@ -36,8 +36,8 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColEqualStringScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColLessStringColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarEqualStringColumn;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import org.junit.Test;
/**
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java (working copy)
@@ -27,9 +27,9 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColEqualDoubleScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterLongColumn;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
+import org.apache.hive.exception.HiveException;
import org.junit.Test;
/**
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (working copy)
@@ -94,7 +94,6 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractLongColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColUnaryMinus;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarSubtractLongColumn;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -127,6 +126,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.HiveException;
import org.junit.Test;
public class TestVectorizationContext {
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java (working copy)
@@ -41,7 +41,6 @@
import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromLongIterables;
import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromObjectIterables;
import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromRepeats;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -57,6 +56,7 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java (working copy)
@@ -32,7 +32,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.RCFile;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
@@ -55,6 +54,7 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hive.exception.HiveException;
import org.junit.Before;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java (working copy)
@@ -25,9 +25,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hive.exception.HiveException;
/**
* Vectorized data source operator for testing.
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java (working copy)
@@ -25,9 +25,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hive.exception.HiveException;
/**
* Operator that captures output emitted by parent.
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java (working copy)
@@ -29,7 +29,7 @@
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
/**
* Test helper class that creates vectorized execution batches from arbitrary type iterables.
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java (working copy)
@@ -24,8 +24,8 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromObjectIterables;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.LimitDesc;
+import org.apache.hive.exception.HiveException;
import org.junit.Test;
/**
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/udf/generic/GenericUDFIsNull.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/udf/generic/GenericUDFIsNull.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/udf/generic/GenericUDFIsNull.java (working copy)
@@ -22,10 +22,10 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
@Description(name = "myisnull",
value = "_FUNC_(value,default_value) - Returns default value if value is null else returns value",
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/udf/TestVectorUDFAdaptor.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/udf/TestVectorUDFAdaptor.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/udf/TestVectorUDFAdaptor.java (working copy)
@@ -22,6 +22,7 @@
import java.util.ArrayList;
import java.util.List;
+
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -32,7 +33,6 @@
import org.apache.hadoop.hive.ql.exec.vector.udf.generic.GenericUDFIsNull;
import org.apache.hadoop.hive.ql.exec.vector.udf.legacy.ConcatTextLongDoubleUDF;
import org.apache.hadoop.hive.ql.exec.vector.udf.legacy.LongUDF;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -41,6 +41,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.HiveException;
import org.junit.Test;
/*
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java (working copy)
@@ -26,7 +26,6 @@
import java.util.Map;
import org.apache.hadoop.hive.ql.exec.vector.util.VectorizedRowGroupGenUtil;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.HiveException;
import org.junit.Test;
/**
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java (working copy)
@@ -42,7 +42,6 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.BaseWork;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -53,6 +52,7 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hive.exception.HiveException;
import org.apache.tez.client.TezSession;
import org.apache.tez.dag.api.DAG;
import org.apache.tez.dag.api.Edge;
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java (working copy)
@@ -24,7 +24,6 @@
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import org.junit.BeforeClass;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java (working copy)
@@ -28,7 +28,6 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hive.exception.HiveException;
public class TestUtilities extends TestCase {
Index: ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java (working copy)
@@ -20,6 +20,7 @@
import static org.junit.Assert.*;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hive.security.HiveOperationType;
import org.junit.Test;
/**
Index: ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java (working copy)
@@ -30,7 +30,6 @@
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseUtils;
@@ -46,6 +45,7 @@
import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.parse.ASTNode;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
Index: ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java (working copy)
@@ -21,6 +21,7 @@
import org.antlr.runtime.CommonToken;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.parse.ASTNode;
import org.junit.Before;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java (working copy)
@@ -7,6 +7,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.parse.ASTNode;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java (working copy)
@@ -24,6 +24,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.parse.ASTNode;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java (working copy)
@@ -29,6 +29,7 @@
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMacro;
+import org.apache.hive.parse.ASTNode;
import org.junit.Before;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java (working copy)
@@ -26,13 +26,13 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.OperatorFactory;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.CollectDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hive.exception.HiveException;
import org.junit.Ignore;
/**
Index: ql/src/test/org/apache/hadoop/hive/ql/testutil/OperatorTestUtils.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/testutil/OperatorTestUtils.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/testutil/OperatorTestUtils.java (working copy)
@@ -7,7 +7,6 @@
import org.apache.hadoop.hive.ql.exec.CollectOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
@@ -16,6 +15,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.HiveException;
public class OperatorTestUtils {
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateAdd.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateAdd.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateAdd.java (working copy)
@@ -23,7 +23,6 @@
import junit.framework.TestCase;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
@@ -32,6 +31,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
public class TestGenericUDFDateAdd extends TestCase {
public void testStringToDate() throws HiveException {
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateDiff.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateDiff.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateDiff.java (working copy)
@@ -23,7 +23,6 @@
import junit.framework.TestCase;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateDiff;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
public class TestGenericUDFDateDiff extends TestCase {
public void testStringToDate() throws HiveException {
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateSub.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateSub.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateSub.java (working copy)
@@ -23,7 +23,6 @@
import junit.framework.TestCase;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub;
@@ -32,6 +31,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
public class TestGenericUDFDateSub extends TestCase {
public void testStringToDate() throws HiveException {
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDate.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDate.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDate.java (working copy)
@@ -23,7 +23,6 @@
import junit.framework.TestCase;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDate;
@@ -32,6 +31,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
public class TestGenericUDFDate extends TestCase {
public void testStringToDate() throws HiveException {
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java (working copy)
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -34,6 +33,7 @@
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAbs.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAbs.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAbs.java (working copy)
@@ -21,7 +21,6 @@
import junit.framework.TestCase;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs;
@@ -33,6 +32,7 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
public class TestGenericUDFAbs extends TestCase {
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrim.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrim.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrim.java (working copy)
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
@@ -25,6 +24,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import junit.framework.TestCase;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCeil.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCeil.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCeil.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -41,6 +40,7 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java (working copy)
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -33,6 +32,7 @@
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java (working copy)
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -34,6 +33,7 @@
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLpad.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLpad.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLpad.java (working copy)
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
@@ -26,6 +25,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import junit.framework.TestCase;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFloor.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFloor.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFloor.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -41,6 +40,7 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFCorrelation.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFCorrelation.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFCorrelation.java (working copy)
@@ -19,11 +19,12 @@
package org.apache.hadoop.hive.ql.udf.generic;
import junit.framework.TestCase;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.HiveException;
public class TestGenericUDAFCorrelation extends TestCase {
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java (working copy)
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -34,6 +33,7 @@
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDecode.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDecode.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDecode.java (working copy)
@@ -22,11 +22,11 @@
import junit.framework.TestCase;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
public class TestGenericUDFDecode extends TestCase {
public void testDecode() throws UnsupportedEncodingException, HiveException {
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRpad.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRpad.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRpad.java (working copy)
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
@@ -26,6 +25,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import junit.framework.TestCase;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLTrim.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLTrim.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLTrim.java (working copy)
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
@@ -25,6 +24,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import junit.framework.TestCase;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java (working copy)
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -34,6 +33,7 @@
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPower.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPower.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPower.java (working copy)
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -34,6 +33,7 @@
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEncode.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEncode.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEncode.java (working copy)
@@ -22,12 +22,12 @@
import junit.framework.TestCase;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BytesWritable;
+import org.apache.hive.exception.HiveException;
public class TestGenericUDFEncode extends TestCase {
public void testEncode() throws UnsupportedEncodingException, HiveException{
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRTrim.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRTrim.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRTrim.java (working copy)
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
@@ -25,6 +24,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import junit.framework.TestCase;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java (working copy)
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -33,6 +32,7 @@
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -41,6 +40,7 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java (revision 1560846)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -41,6 +40,7 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import org.junit.Assert;
import org.junit.Test;
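
The UDF test hunks above only repoint HiveException from org.apache.hadoop.hive.ql.metadata to its new home in org.apache.hive.exception. A minimal sketch of what such a test looks like after the move (the test class name and values here are illustrative, not part of this patch; GenericUDFOPPlus is an existing UDF):

    package org.apache.hadoop.hive.ql.udf.generic;

    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hive.exception.HiveException; // relocated by this patch
    import org.junit.Assert;
    import org.junit.Test;

    public class TestGenericUDFOPPlusSketch {
      @Test
      public void testIntPlusInt() throws HiveException {
        GenericUDFOPPlus udf = new GenericUDFOPPlus();
        ObjectInspector[] inputOIs = {
            PrimitiveObjectInspectorFactory.writableIntObjectInspector,
            PrimitiveObjectInspectorFactory.writableIntObjectInspector };
        udf.initialize(inputOIs);
        DeferredObject[] args = {
            new DeferredJavaObject(new IntWritable(2)),
            new DeferredJavaObject(new IntWritable(3)) };
        // int + int evaluates to an IntWritable
        IntWritable result = (IntWritable) udf.evaluate(args);
        Assert.assertEquals(5, result.get());
      }
    }
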
Index: ql/src/test/queries/clientpositive/authorization_role_grant1.q
===================================================================
--- ql/src/test/queries/clientpositive/authorization_role_grant1.q (revision 1560846)
+++ ql/src/test/queries/clientpositive/authorization_role_grant1.q (working copy)
@@ -1,3 +1,6 @@
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+-- enable SQL standard authorization
+
-- role granting without role keyword
create role src_role2;
grant src_role2 to user user2 ;
@@ -2,6 +5,8 @@
show role grant user user2;
+show roles;
-- revoke role without role keyword
revoke src_role2 from user user2;
show role grant user user2;
+show roles;
@@ -18,3 +23,12 @@
-- revoke role without role keyword
revoke src_role_wadmin from user user2 with admin option;
show role grant user user2;
+
+
+
+-- drop roles
+show roles;
+drop role src_role2;
+show roles;
+drop role src_role_wadmin;
+show roles;
Index: ql/src/test/queries/clientpositive/authorization_1_sql_std.q
===================================================================
--- ql/src/test/queries/clientpositive/authorization_1_sql_std.q (revision 0)
+++ ql/src/test/queries/clientpositive/authorization_1_sql_std.q (revision 0)
@@ -0,0 +1,33 @@
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+
+create table src_autho_test (key STRING, value STRING);
+
+set hive.security.authorization.enabled=true;
+
+-- table grant to user
+
+grant select on table src_autho_test to user hive_test_user;
+
+show grant user hive_test_user on table src_autho_test;
+
+
+revoke select on table src_autho_test from user hive_test_user;
+show grant user hive_test_user on table src_autho_test;
+
+-- role
+create role src_role;
+grant role src_role to user hive_test_user;
+show role grant user hive_test_user;
+
+-- table grant to role
+
+grant select on table src_autho_test to role src_role;
+
+show grant role src_role on table src_autho_test;
+revoke select on table src_autho_test from role src_role;
+
+-- drop role
+drop role src_role;
+
+set hive.security.authorization.enabled=false;
+drop table src_autho_test;
\ No newline at end of file
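
Both q-tests above drive the grant/revoke path through the SQLStdHiveAuthorizerFactory named in their first set statement. A minimal sketch, assuming a client wants the same setup programmatically rather than via the q-file; the Driver usage and statements are illustrative:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class SqlStdAuthSmoke {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        conf.set("hive.security.authorization.manager",
            "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
        conf.set("hive.security.authorization.enabled", "true");
        SessionState.start(conf);
        Driver driver = new Driver(conf);
        // mirrors the role lifecycle exercised by authorization_role_grant1.q
        driver.run("create role src_role2");
        driver.run("grant src_role2 to user user2");
        driver.run("show role grant user user2");
        driver.run("drop role src_role2");
      }
    }
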
Index: ql/src/test/templates/TestParse.vm
===================================================================
--- ql/src/test/templates/TestParse.vm (revision 1560846)
+++ ql/src/test/templates/TestParse.vm (working copy)
@@ -27,6 +27,7 @@
import org.apache.hadoop.hive.ql.QTestUtil;
import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hive.parse.ASTNode;
public class $className extends TestCase {
Index: ql/src/test/templates/TestParseNegative.vm
===================================================================
--- ql/src/test/templates/TestParseNegative.vm (revision 1560846)
+++ ql/src/test/templates/TestParseNegative.vm (working copy)
@@ -27,6 +27,7 @@
import org.apache.hadoop.hive.ql.QTestUtil;
import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hive.parse.ASTNode;
public class $className extends TestCase {
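
The two template changes above only add the relocated ASTNode import to the generated parser tests. A minimal sketch of the parse entry point those tests exercise, assuming ParseDriver hands back the relocated ASTNode after this refactor; the query text is illustrative:

    import org.apache.hadoop.hive.ql.parse.ParseDriver;
    import org.apache.hive.parse.ASTNode; // relocated by this patch

    public class ParseSketch {
      public static void main(String[] args) throws Exception {
        ParseDriver pd = new ParseDriver();
        ASTNode tree = pd.parse("select key, value from src where key > 10");
        // dump() renders the AST as indented text, handy for inspection
        System.out.println(tree.dump());
      }
    }
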
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java (working copy)
@@ -34,7 +34,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -42,6 +41,7 @@
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
//try to replace a bucket map join with a sorted merge map join
public class SortedMergeBucketMapJoinOptimizer implements Transform {
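
This and the following optimizer hunks all make the same swap: Node now comes from org.apache.hive.parse instead of org.apache.hadoop.hive.ql.lib. For context, a minimal sketch of the walker/dispatcher pattern these classes share; the counting processor and empty rule map are illustrative:

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.Stack;
    import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
    import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
    import org.apache.hadoop.hive.ql.lib.Dispatcher;
    import org.apache.hadoop.hive.ql.lib.GraphWalker;
    import org.apache.hadoop.hive.ql.lib.NodeProcessor;
    import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
    import org.apache.hadoop.hive.ql.lib.Rule;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hive.parse.Node; // relocated by this patch

    public class WalkerSketch {
      static class CountingProc implements NodeProcessor {
        int hits = 0;
        @Override
        public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
            Object... nodeOutputs) throws SemanticException {
          hits++; // count every node the walker dispatches to us
          return null;
        }
      }

      static void walk(ArrayList<Node> topNodes) throws SemanticException {
        // with no rules registered, every node falls through to the default proc
        Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
        Dispatcher disp = new DefaultRuleDispatcher(new CountingProc(), rules, null);
        GraphWalker walker = new DefaultGraphWalker(disp);
        walker.startWalking(topNodes, null);
      }
    }
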
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerExpressionOperatorFactory.java (working copy)
@@ -21,7 +21,6 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -31,6 +30,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
+import org.apache.hive.parse.Node;
/**
* Expression processor factory for pruning. Each processor tries to
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java (working copy)
@@ -31,14 +31,12 @@
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
import org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanWork;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -52,6 +50,8 @@
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.StatsWork;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.Node;
/**
* Processor for the rule - table scan.
*/
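
Files like GenMRTableScan1 pick up two relocations at once: ErrorMsg moves to org.apache.hive.exception and Node to org.apache.hive.parse. A minimal sketch of the ErrorMsg idiom these classes use; the null check is illustrative, while GENERIC_ERROR is an existing ErrorMsg constant:

    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hive.exception.ErrorMsg; // relocated by this patch

    public class ErrorMsgSketch {
      static void requireTable(Object table) throws SemanticException {
        if (table == null) {
          // getMsg(String) appends the detail to the canned message
          throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("table not resolved"));
        }
      }
    }
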
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java (working copy)
@@ -30,7 +30,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.DummyStoreOperator;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
@@ -38,10 +37,8 @@
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ParseContext;
@@ -57,6 +54,9 @@
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.SMBJoinDesc;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
//try to replace a bucket map join with a sorted merge map join
abstract public class AbstractSMBJoinProc extends AbstractBucketJoinProc implements NodeProcessor {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java (working copy)
@@ -25,12 +25,12 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Processor for the rule - reduce sink followed by reduce sink.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (working copy)
@@ -33,7 +33,6 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.ParseContext;
@@ -41,6 +40,7 @@
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Processor for the rule - table scan followed by reduce sink.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerOperatorFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerOperatorFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerOperatorFactory.java (working copy)
@@ -24,13 +24,13 @@
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hive.parse.Node;
/**
* Operator factory for pruning processing of operator graph We find
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java (working copy)
@@ -25,8 +25,8 @@
import org.apache.hadoop.hive.metastore.PartitionExpressionProxy;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hive.exception.HiveException;
/**
* The basic implementation of PartitionExpressionProxy that uses ql package classes.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (working copy)
@@ -35,13 +35,11 @@
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.PrunerUtils;
@@ -57,6 +55,8 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
/**
* The transformation step that does partition pruning.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java (working copy)
@@ -20,7 +20,6 @@
import java.util.Map;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.PrunerExpressionOperatorFactory;
@@ -29,6 +28,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hive.parse.Node;
/**
* Expression processor factory for partition pruning. Each processor tries to
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartExprEvalUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartExprEvalUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartExprEvalUtils.java (working copy)
@@ -26,7 +26,6 @@
import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
@@ -35,6 +34,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
public class PartExprEvalUtils {
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java (working copy)
@@ -21,11 +21,11 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
public class SortedMergeJoinProc extends AbstractSMBJoinProc implements NodeProcessor {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/TableSizeBasedBigTableSelectorForAutoSMJ.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/TableSizeBasedBigTableSelectorForAutoSMJ.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/TableSizeBasedBigTableSelectorForAutoSMJ.java (working copy)
@@ -25,13 +25,13 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.exception.HiveException;
/*
* This is a pluggable policy to choose the candidate map-join table for converting a join to a
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/IndexUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/IndexUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/IndexUtils.java (working copy)
@@ -40,13 +40,13 @@
import org.apache.hadoop.hive.ql.index.IndexMetadataChangeTask;
import org.apache.hadoop.hive.ql.index.IndexMetadataChangeWork;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.physical.index.IndexWhereProcessor;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.exception.HiveException;
/**
* Utility class for index support.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java (working copy)
@@ -26,13 +26,13 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.OptimizeTezProcContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
+import org.apache.hive.parse.Node;
/**
* SetReducerParallelism determines how many reducers should
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java (working copy)
@@ -29,7 +29,6 @@
import org.apache.hadoop.hive.ql.exec.OperatorFactory;
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext.UnionParseContext;
@@ -37,6 +36,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Operator factory for union processing.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcessor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcessor.java (working copy)
@@ -23,7 +23,6 @@
import java.util.Map;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
@@ -31,7 +30,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -39,6 +37,8 @@
import org.apache.hadoop.hive.ql.optimizer.Transform;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.Node;
/**
* Implementation of the union processor. This can be enhanced later on.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java (working copy)
@@ -42,7 +42,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -70,6 +69,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.parse.Node;
/**
* SkewJoinOptimizer.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (working copy)
@@ -33,7 +33,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
@@ -52,7 +51,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -81,6 +79,8 @@
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.Node;
/**
* Implementation of one of the rule-based map join optimization. User passes hints to specify
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java (working copy)
@@ -14,7 +14,6 @@
import org.apache.hadoop.hive.ql.exec.OperatorFactory;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.RowSchema;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.GenTezProcContext;
@@ -27,6 +26,7 @@
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.TezWork;
import org.apache.hadoop.hive.ql.plan.TezWork.EdgeType;
+import org.apache.hive.parse.Node;
public class ReduceSinkMapJoinProc implements NodeProcessor {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java (working copy)
@@ -30,13 +30,13 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* this transformation does bucket map join optimization.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/ReduceSinkDeDuplication.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/ReduceSinkDeDuplication.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/ReduceSinkDeDuplication.java (working copy)
@@ -37,7 +37,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -48,6 +47,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
+import org.apache.hive.parse.Node;
/**
* If two reducer sink operators share the same partition/sort columns and order,
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java (working copy)
@@ -50,7 +50,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -68,6 +67,7 @@
import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Implementation of Correlation Optimizer. This optimizer is based on
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationUtilities.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationUtilities.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationUtilities.java (working copy)
@@ -41,7 +41,6 @@
import org.apache.hadoop.hive.ql.exec.SelectOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.RowResolver;
@@ -55,6 +54,7 @@
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hive.exception.HiveException;
/**
* Utilities for both CorrelationOptimizer and ReduceSinkDeDuplication.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java (working copy)
@@ -41,7 +41,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -56,6 +55,7 @@
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.SMBJoinDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
+import org.apache.hive.parse.Node;
/**
* This transformation does optimization for enforcing bucketing and sorting.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java (working copy)
@@ -42,13 +42,11 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ParseContext;
@@ -70,6 +68,8 @@
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
/** There is a set of queries which can be answered entirely from statistics stored in metastore.
* Examples of such queries are count(*), count(a), max(a), min(b) etc. Hive already collects
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java (working copy)
@@ -26,7 +26,6 @@
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.parse.GlobalLimitCtx;
@@ -38,6 +37,7 @@
import org.apache.hadoop.hive.ql.parse.SplitSample;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.exception.HiveException;
/**
* This optimizer is used to reduce the input size for the query for queries which are
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java (working copy)
@@ -29,7 +29,6 @@
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRUnionCtx;
@@ -43,6 +42,7 @@
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hive.parse.Node;
/**
* Processor for the rule - TableScan followed by Union.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java (working copy)
@@ -30,12 +30,10 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
@@ -50,6 +48,8 @@
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
/**
* Expression processor factory for partition condition removing. Each processor tries to
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java (working copy)
@@ -26,16 +26,16 @@
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
/**
* PcrOpProcFactory contains processors that process expression tree of filter operators
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java (working copy)
@@ -32,13 +32,13 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.optimizer.Transform;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* The transformation step that does partition condition remover.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java (working copy)
@@ -34,14 +34,11 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ParseContext;
@@ -54,6 +51,9 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
/**
* this transformation does bucket map join optimization.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (working copy)
@@ -51,7 +51,6 @@
import org.apache.hadoop.hive.ql.exec.UDTFOperator;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
@@ -77,6 +76,7 @@
import org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.parse.Node;
/**
* Factory for generating the different node processors used by ColumnPruner.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java (working copy)
@@ -43,7 +43,6 @@
import org.apache.hadoop.hive.ql.io.ContentSummaryInputFormat;
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
import org.apache.hadoop.hive.ql.metadata.InputEstimator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
@@ -62,6 +61,7 @@
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hive.exception.HiveException;
/**
* Tries to convert simple fetch query to single fetch task, which fetches rows directly
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java (working copy)
@@ -27,13 +27,13 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.exception.HiveException;
/*
* This is a pluggable policy to choose the candidate map-join table for converting a join to a
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java (working copy)
@@ -33,7 +33,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -44,6 +43,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hive.parse.Node;
/**
* merges SEL-SEL or FIL-FIL into single operator
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java (working copy)
@@ -25,13 +25,13 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Processor for the rule - table scan followed by reduce sink.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/LimitPushdownOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/LimitPushdownOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/LimitPushdownOptimizer.java (working copy)
@@ -33,13 +33,13 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Make RS calculate top-K selection for limit clause.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Utils;
@@ -35,6 +34,7 @@
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Processor for the rule - union followed by reduce sink.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchAggregation.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchAggregation.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchAggregation.java (working copy)
@@ -35,7 +35,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -49,6 +48,7 @@
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hive.parse.Node;
// execute final aggregation stage for simple fetch query on fetch task
public class SimpleFetchAggregation implements Transform {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java (working copy)
@@ -38,7 +38,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -49,6 +48,7 @@
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
+import org.apache.hive.parse.Node;
/**
* The transformation step that does sample pruning.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/AnnotateWithStatistics.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/AnnotateWithStatistics.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/AnnotateWithStatistics.java (working copy)
@@ -32,7 +32,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -40,6 +39,7 @@
import org.apache.hadoop.hive.ql.optimizer.Transform;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
public class AnnotateWithStatistics implements Transform {
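
AnnotateWithStatistics is one of several Transform implementations touched by this import reshuffle. A minimal sketch of a Transform under the new imports, assuming Operator still implements the relocated Node interface; the no-op body is illustrative:

    import java.util.ArrayList;
    import org.apache.hadoop.hive.ql.optimizer.Transform;
    import org.apache.hadoop.hive.ql.parse.ParseContext;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hive.parse.Node; // relocated by this patch

    public class NoopTransform implements Transform {
      @Override
      public ParseContext transform(ParseContext pctx) throws SemanticException {
        // top operators double as graph nodes for the walkers shown earlier
        ArrayList<Node> topNodes = new ArrayList<Node>(pctx.getTopOps().values());
        // a real transform would walk topNodes and rewrite the plan in place
        return pctx;
      }
    }
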
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java (working copy)
@@ -26,7 +26,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
@@ -37,10 +36,8 @@
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.SelectOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -68,6 +65,9 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java (working copy)
@@ -28,15 +28,15 @@
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.OptimizeTezProcContext;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.Statistics;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
/**
* ConvertJoinMapJoin is an optimization that replaces a common join
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapjoinProc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapjoinProc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapjoinProc.java (working copy)
@@ -20,13 +20,13 @@
import java.util.Stack;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.Node;
public class BucketMapjoinProc extends AbstractBucketJoinProc implements NodeProcessor {
public BucketMapjoinProc(ParseContext pGraphContext) {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapjoinProc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapjoinProc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapjoinProc.java (working copy)
@@ -21,14 +21,14 @@
import java.util.Stack;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.Node;
public class SortedMergeBucketMapjoinProc extends AbstractSMBJoinProc implements NodeProcessor {
public SortedMergeBucketMapjoinProc(ParseContext pctx) {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (working copy)
@@ -58,7 +58,6 @@
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.rcfile.merge.MergeWork;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRUnionCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
@@ -96,6 +95,7 @@
import org.apache.hadoop.hive.ql.stats.StatsFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hive.exception.HiveException;
/**
* Common utility functions for the Processor to convert an operator into
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/LBPartitionProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/LBPartitionProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/LBPartitionProcFactory.java (working copy)
@@ -24,13 +24,13 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.optimizer.PrunerOperatorFactory;
import org.apache.hadoop.hive.ql.optimizer.pcr.PcrOpProcFactory;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.exception.HiveException;
/**
* Walks through the top operators in the tree to find all partitions.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/LBExprProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/LBExprProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/LBExprProcFactory.java (working copy)
@@ -19,7 +19,6 @@
import java.util.Map;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -29,6 +28,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hive.parse.Node;
/**
* Expression processor factory for list bucketing pruning. Each processor tries to
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/Generator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/Generator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/Generator.java (working copy)
@@ -36,7 +36,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -45,6 +44,7 @@
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.parse.Node;
/**
* This class generates the lineage information for the columns
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java (working copy)
@@ -35,7 +35,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -48,6 +47,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Expression processor factory for lineage. Each processor is responsible to
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java (working copy)
@@ -46,7 +46,6 @@
import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency;
import org.apache.hadoop.hive.ql.hooks.LineageInfo.DependencyType;
import org.apache.hadoop.hive.ql.hooks.LineageInfo.TableAliasInfo;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Utils;
@@ -58,6 +57,7 @@
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
+import org.apache.hive.parse.Node;
/**
* Operator factory for the rule processors for lineage.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java (working copy)
@@ -22,12 +22,12 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Processor for the rule - no specific rule fired.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
@@ -38,6 +37,7 @@
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Operator factory for MapJoin processing.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java (working copy)
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.SelectOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -43,6 +42,7 @@
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Factory of methods used by {@link RewriteGBUsingIndex}
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java (working copy)
@@ -37,7 +37,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
@@ -46,6 +45,7 @@
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* RewriteCanApplyCtx class stores the context for the {@link RewriteCanApplyProcFactory}
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java (working copy)
@@ -39,7 +39,6 @@
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.index.AggregateIndexHandler;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.IndexUtils;
@@ -48,6 +47,7 @@
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.exception.HiveException;
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
@@ -34,6 +33,7 @@
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.ASTNode;
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java (working copy)
@@ -32,7 +32,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -44,6 +43,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hive.parse.Node;
/**
* RewriteQueryUsingAggregateIndexCtx class stores the
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java (working copy)
@@ -37,10 +37,8 @@
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.SelectOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.ParseContext;
@@ -60,6 +58,8 @@
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
/**
* This class defines a procedure factory used to rewrite the operator plan
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java (working copy)
@@ -40,7 +40,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
@@ -48,6 +47,7 @@
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Implementation of one of the rule-based optimization steps. ColumnPruner gets
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java (working copy)
@@ -41,12 +41,10 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ParseContext;
@@ -62,6 +60,8 @@
import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
/**
* This transformation does group by optimization. If the grouping key is a superset
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java (working copy)
@@ -39,7 +39,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -47,6 +46,7 @@
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
*
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java (working copy)
@@ -23,12 +23,12 @@
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.physical.SkewJoinResolver.SkewJoinProcCtx;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Node processor factory for skew join resolver.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinResolver.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinResolver.java (working copy)
@@ -21,9 +21,9 @@
import java.util.List;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.TaskGraphWalker;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/*
* If a join has been automatically converted into a sort-merge join, create a conditional
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java (working copy)
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -39,6 +38,7 @@
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hive.parse.Node;
/**
* An implementation of PhysicalPlanResolver. It iterates over each task with a rule
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java (working copy)
@@ -31,11 +31,11 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.TaskGraphWalker.TaskGraphWalkerContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.MapWork;
+import org.apache.hive.parse.Node;
/**
* Common iteration methods for converting joins and sort-merge joins.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java (working copy)
@@ -22,8 +22,8 @@
import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.exception.HiveException;
/**
* A hierarchical physical optimizer, which contains a list of
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java (working copy)
@@ -39,7 +39,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -56,6 +55,7 @@
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* An implementation of PhysicalPlanResolver. It iterates over each MapRedTask to see whether the task
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java (working copy)
@@ -37,7 +37,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -49,6 +48,7 @@
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hive.parse.Node;
/**
* Node processor factory for the map join resolver. What it does is replace the
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/IndexWhereResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/IndexWhereResolver.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/IndexWhereResolver.java (working copy)
@@ -23,9 +23,9 @@
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.optimizer.physical.index.IndexWhereTaskDispatcher;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
public class IndexWhereResolver implements PhysicalPlanResolver {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java (working copy)
@@ -41,7 +41,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
@@ -56,6 +55,7 @@
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.NullStructSerDe;
+import org.apache.hive.parse.Node;
/**
*
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java (working copy)
@@ -38,10 +38,8 @@
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
import org.apache.hadoop.hive.ql.index.HiveIndexQueryContext;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
@@ -53,6 +51,8 @@
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
/**
*
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java (working copy)
@@ -36,7 +36,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -47,6 +46,7 @@
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hive.parse.Node;
/**
*
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinResolver.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinResolver.java (working copy)
@@ -21,9 +21,9 @@
import java.util.List;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.TaskGraphWalker;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/*
* Convert tasks involving JOIN into MAPJOIN.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingOpProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingOpProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingOpProcFactory.java (working copy)
@@ -32,7 +32,6 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.SelectOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Utils;
@@ -44,6 +43,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc.ExprNodeDescEqualityWrapper;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Operator factory for the rule processors for inferring bucketing/sorting columns.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (working copy)
@@ -58,14 +58,12 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.lib.TaskGraphWalker;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -160,6 +158,8 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrim;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
public class Vectorizer implements PhysicalPlanResolver {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java (working copy)
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -42,6 +41,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hive.parse.Node;
/**
* Common utility functions for the Pruner to perform optimization.
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (working copy)
@@ -26,11 +26,11 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Context for Expression Walker for determining predicate pushdown candidates
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (working copy)
@@ -32,7 +32,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -44,6 +43,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Expression factory for predicate pushdown processing. Each processor
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java (working copy)
@@ -24,12 +24,12 @@
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Context class for operator walker of predicate pushdown.
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (working copy)
@@ -34,13 +34,13 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.optimizer.Transform;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Implements predicate pushdown. Predicate pushdown is a term borrowed from
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (working copy)
@@ -40,13 +40,11 @@
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
@@ -63,6 +61,8 @@
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.parse.Node;
/**
* Operator factory for predicate pushdown processing of the operator graph. Each
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java (working copy)
@@ -38,7 +38,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
@@ -55,6 +54,7 @@
import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Propagates filters to other aliases based on the join condition.
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -41,6 +40,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.exception.ErrorMsg;
/**
* Describes a GenericFunc node.
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ValidationUtility.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ValidationUtility.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ValidationUtility.java (working copy)
@@ -22,8 +22,8 @@
import java.util.List;
import java.util.Set;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.exception.ErrorMsg;
/**
* Common utilities for validation.
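The ErrorMsg relocation above (and in the surrounding hunks) is import-only; throw sites keep the same enum constants and getMsg() calls. A hedged sketch of a typical call site after the patch (the TableCheck class and validateTable helper are illustrative, not part of the patch; ErrorMsg.INVALID_TABLE and getMsg(String) are existing members of the enum):

import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hive.exception.ErrorMsg; // was org.apache.hadoop.hive.ql.ErrorMsg

public class TableCheck {

  // Raises the same SemanticException as before, built from the relocated enum.
  static void validateTable(String tableName, boolean exists) throws SemanticException {
    if (!exists) {
      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
    }
  }
}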
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java (working copy)
@@ -21,10 +21,10 @@
import java.io.Serializable;
import java.util.List;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.parse.Node;
/**
* ExprNodeDesc.
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (working copy)
@@ -45,7 +45,6 @@
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -66,6 +65,7 @@
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hive.exception.HiveException;
/**
* PlanUtils.
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (working copy)
@@ -29,12 +29,12 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* PartitionDesc.
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (working copy)
@@ -30,7 +30,6 @@
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
@@ -40,6 +39,7 @@
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.ErrorMsg;
/**
* CreateTableDesc.
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java (working copy)
@@ -29,7 +29,6 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.PTFPartition;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.LeadLagInfo;
import org.apache.hadoop.hive.ql.parse.WindowingExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.plan.ptf.BoundaryDef;
@@ -57,6 +56,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
@SuppressWarnings("deprecation")
public class PTFDeserializer {
Index: ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java (working copy)
@@ -34,7 +34,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockManagerCtx;
@@ -45,10 +44,11 @@
import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.Watcher;
Index: ql/src/java/org/apache/hadoop/hive/ql/lockmgr/LockException.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/LockException.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/LockException.java (working copy)
@@ -18,7 +18,7 @@
package org.apache.hadoop.hive.ql.lockmgr;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
/**
* Exception from lock manager.
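Because LockException extends HiveException, the one-line import swap above is the entire change for this class; the body compiles as before. A sketch of the post-patch file (the constructor set shown is a plausible subset, not spelled out in the hunk):

package org.apache.hadoop.hive.ql.lockmgr;

import org.apache.hive.exception.HiveException; // relocated from ql.metadata

// Exception from the lock manager, now rooted in the shared exception package.
public class LockException extends HiveException {

  public LockException(String message) {
    super(message);
  }

  public LockException(Throwable cause) {
    super(cause);
  }
}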
Index: ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java (working copy)
@@ -33,9 +33,9 @@
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hive.exception.HiveException;
/**
* Shared lock manager for a dedicated Hive server. All locks are managed in memory
Index: ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java (working copy)
@@ -29,16 +29,16 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.parse.Node;
/**
*
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/Utils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/Utils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/Utils.java (working copy)
@@ -20,6 +20,8 @@
import java.util.Stack;
+import org.apache.hive.parse.Node;
+
/**
* Contains common utility functions to manipulate nodes, walkers etc.
*/
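The ql.lib interfaces themselves (NodeProcessor, Dispatcher, GraphWalker, Rule) stay put in the hunks below; only the Node type they traffic in moves, so every processor now compiles against a mixed import set. A minimal no-op processor illustrating that mix (the NoopProcessor name is illustrative; the process signature matches the existing NodeProcessor interface):

import java.util.Stack;

import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hive.parse.Node; // relocated from ql.lib

public class NoopProcessor implements NodeProcessor {

  // Visits a node during graph walking and fires no rule-specific logic.
  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    return null;
  }
}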
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/RuleRegExp.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/RuleRegExp.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/RuleRegExp.java (working copy)
@@ -23,6 +23,7 @@
import java.util.regex.Pattern;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Rule interface for Nodes. Used in Node dispatching to dispatch process/visitor
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/CompositeProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/CompositeProcessor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/CompositeProcessor.java (working copy)
@@ -20,6 +20,7 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* CompositeProcessor. Holds a list of node processors to be fired by the same
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java (working copy)
@@ -22,6 +22,7 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Dispatches calls to the relevant method in the processor. The user registers various
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java (working copy)
@@ -20,6 +20,7 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Base class for processing operators which is a no-op. The specific processors
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java (working copy)
@@ -21,6 +21,7 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Dispatcher interface for Operators. Used in operator graph walking to dispatch
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java (working copy)
@@ -22,6 +22,7 @@
import java.util.HashMap;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Interface for operator graph walker.
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/Rule.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/Rule.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/Rule.java (working copy)
@@ -21,6 +21,7 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Rule interface for Operators. Used in operator dispatching to dispatch
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/PreOrderWalker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/PreOrderWalker.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/PreOrderWalker.java (working copy)
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.lib;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Base class for operator graph walkers. This class takes a list of starting ops
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/RuleExactMatch.java (working copy)
@@ -21,6 +21,7 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Implementation of the Rule interface for Nodes. Used in Node dispatching to dispatch
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java (working copy)
@@ -30,6 +30,7 @@
import org.apache.hadoop.hive.ql.exec.ConditionalTask;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Base class for operator graph walkers. This class takes a list of starting ops
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java (working copy)
@@ -26,6 +26,7 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.Node;
/**
* Base class for operator graph walkers. This class takes a list of starting ops
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java (working copy)
@@ -27,6 +27,7 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.ASTNode;
/**
* Internal representation of the join tree.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java (working copy)
@@ -30,11 +30,12 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
/**
* ColumnStatsSemanticAnalyzer.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java (working copy)
@@ -23,6 +23,7 @@
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.Hook;
+import org.apache.hive.parse.ASTNode;
/**
* HiveSemanticAnalyzerHook allows Hive to be extended with custom
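The ASTNode move also reaches public extension points such as this hook interface, so external hook implementations need the same one-line import update. A pass-through hook as a sketch (assuming preAnalyze and postAnalyze keep their existing signatures; the PassThroughHook name is illustrative):

import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hive.parse.ASTNode; // relocated from ql.parse

public class PassThroughHook implements HiveSemanticAnalyzerHook {

  // Returns the parse tree unmodified before semantic analysis runs.
  @Override
  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
      throws SemanticException {
    return ast;
  }

  // No post-analysis work in this sketch.
  @Override
  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
  }
}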
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java (working copy)
@@ -22,7 +22,7 @@
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hive.parse.Node;
/**
* Walks the operator tree in pre order fashion.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (working copy)
@@ -33,18 +33,19 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.plan.CopyWork;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
import org.apache.hadoop.hive.ql.plan.MoveWork;
import org.apache.hadoop.hive.ql.plan.StatsWork;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
/**
* LoadSemanticAnalyzer.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java (working copy)
@@ -24,7 +24,7 @@
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
/**
* Context information provided by Hive to implementations of
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java (working copy)
@@ -29,13 +29,14 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.plan.CopyWork;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.ASTNode;
/**
* ExportSemanticAnalyzer.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (working copy)
@@ -21,12 +21,13 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
import org.apache.hadoop.hive.ql.plan.FunctionWork;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.ASTNode;
/**
* FunctionSemanticAnalyzer.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingExprNodeEvaluatorFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingExprNodeEvaluatorFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingExprNodeEvaluatorFactory.java (working copy)
@@ -22,12 +22,12 @@
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLag;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLead;
+import org.apache.hive.exception.HiveException;
/*
* When constructing the Evaluator Tree from an ExprNode Tree
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (working copy)
@@ -33,7 +33,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -44,7 +43,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -72,6 +70,9 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.parse.Node;
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java (working copy)
@@ -46,11 +46,9 @@
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
import org.apache.hadoop.hive.ql.index.HiveIndexQueryContext;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
@@ -64,6 +62,8 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
public class IndexUpdater {
 private List<LoadTableDesc> loadTableWork;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java (working copy)
@@ -50,7 +50,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
@@ -71,6 +70,7 @@
import org.apache.hadoop.hive.ql.plan.MoveWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.parse.Node;
public class MapReduceCompiler extends TaskCompiler {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (working copy)
@@ -28,6 +28,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
+import org.apache.hive.parse.ASTNode;
/**
* Implementation of the query block.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java (working copy)
@@ -35,13 +35,11 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.PTFPartition;
import org.apache.hadoop.hive.ql.exec.WindowFunctionInfo;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderExpression;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFInputSpec;
@@ -98,6 +96,9 @@
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
public class PTFTranslator {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (working copy)
@@ -42,12 +42,10 @@
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
@@ -58,6 +56,9 @@
import org.apache.hadoop.hive.ql.plan.MoveWork;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
/**
* ImportSemanticAnalyzer.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy)
@@ -50,7 +50,6 @@
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryProperties;
import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
@@ -80,10 +79,8 @@
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -179,6 +176,11 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.parse.ASTNodeOrigin;
+import org.apache.hive.parse.Node;
/**
* Implementation of the semantic analyzer. It generates the query plan.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (working copy)
@@ -43,7 +43,6 @@
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.LineageInfo;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
@@ -54,6 +53,8 @@
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
/**
* Parse Context: The current parse context. This is passed to the optimizer
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java (working copy)
@@ -28,6 +28,7 @@
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionExpression;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec;
+import org.apache.hive.parse.ASTNode;
/*
* Captures the Window processing specified in a Query. A Query may
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticException.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticException.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticException.java (working copy)
@@ -18,8 +18,8 @@
package org.apache.hadoop.hive.ql.parse;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
/**
* Exception from SemanticAnalyzer.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java (working copy)
@@ -20,6 +20,8 @@
import java.util.ArrayList;
+import org.apache.hive.parse.ASTNode;
+
/**
*
* This class stores all the information specified in the TABLESAMPLE clause.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (working copy)
@@ -29,9 +29,10 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.ASTNode;
/**
* Implementation of the Row Resolver.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (working copy)
@@ -27,6 +27,7 @@
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
+import org.apache.hive.parse.ASTNode;
/**
* ExplainSemanticAnalyzer.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java (working copy)
@@ -32,12 +32,10 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
import org.apache.hadoop.hive.ql.plan.DropMacroDesc;
@@ -45,6 +43,9 @@
import org.apache.hadoop.hive.ql.plan.FunctionWork;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.parse.Node;
/**
* MacroSemanticAnalyzer.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java (working copy)
@@ -32,7 +32,6 @@
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnStatsTask;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.StatsTask;
@@ -43,7 +42,6 @@
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
import org.apache.hadoop.hive.ql.plan.ColumnStatsDesc;
@@ -57,6 +55,8 @@
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
/**
* TaskCompiler is a the base class for classes that compile
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java (working copy)
@@ -5,12 +5,14 @@
import java.util.Map;
import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.parse.QBSubQuery.SubQueryType;
import org.apache.hadoop.hive.ql.parse.QBSubQuery.SubQueryTypeDef;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.parse.ASTNodeOrigin;
public class SubQueryUtils {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWorkWalker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWorkWalker.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWorkWalker.java (working copy)
@@ -25,9 +25,9 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.plan.BaseWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* Walks the operator tree in DFS fashion.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ASTErrorNode.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ASTErrorNode.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ASTErrorNode.java (working copy)
@@ -22,6 +22,7 @@
import org.antlr.runtime.Token;
import org.antlr.runtime.TokenStream;
import org.antlr.runtime.tree.CommonErrorNode;
+import org.apache.hive.parse.ASTNode;
public class ASTErrorNode extends ASTNode {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java (working copy)
@@ -30,6 +30,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec;
+import org.apache.hive.parse.ASTNode;
/**
* Implementation of the parse information related to a query block.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java (working copy)
@@ -23,10 +23,10 @@
import java.util.Stack;
import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.parse.Node;
/**
* PrintOpTreeProcessor.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java (working copy)
@@ -22,6 +22,7 @@
import java.util.List;
import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hive.parse.ASTNode;
public abstract class AbstractSemanticAnalyzerHook implements
HiveSemanticAnalyzerHook {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java (working copy)
@@ -37,7 +37,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -49,6 +48,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
+import org.apache.hive.parse.Node;
/**
* TableAccessAnalyzer walks the operator graph from joins and group bys
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java (working copy)
@@ -24,8 +24,8 @@
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.parse.ASTNode;
/**
* HiveAuthorizationTaskFactory creates DDL authorization related
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java (working copy)
@@ -26,16 +26,13 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -52,6 +49,9 @@
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
/**
* Default implementation of HiveAuthorizationTaskFactory
*/
@@ -190,7 +190,7 @@
if (grandChild.getToken().getType() == HiveParser.TOK_PARTSPEC) {
privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(grandChild));
} else if (grandChild.getToken().getType() == HiveParser.TOK_TABCOLNAME) {
- cols = BaseSemanticAnalyzer.getColumnNames((ASTNode) grandChild);
+ cols = BaseSemanticAnalyzer.getColumnNames(grandChild);
}
}
}
@@ -235,8 +235,12 @@
&& SessionState.get().getAuthenticator() != null) {
roleOwnerName = SessionState.get().getAuthenticator().getUserName();
}
+
+ // Until the grammar supports an explicit admin option, default to false under V2 authorization.
+ boolean isAdmin = !SessionState.get().isAuthorizationModeV2();
+
GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant,
- roles, principalDesc, roleOwnerName, PrincipalType.USER, true);
+ roles, principalDesc, roleOwnerName, PrincipalType.USER, isAdmin);
return TaskFactory.get(new DDLWork(inputs, outputs, grantRevokeRoleDDL), conf);
}
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (working copy)
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.parse;
import java.util.ArrayList;
+
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.NoViableAltException;
@@ -32,6 +33,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.Context;
+import org.apache.hive.parse.ASTNode;
/**
* ParseDriver.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java (working copy)
@@ -27,6 +27,7 @@
import org.antlr.runtime.TokenRewriteStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hive.parse.ASTNode;
/**
* UnparseTranslator is used to "unparse" objects such as views when their
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java (working copy)
@@ -47,7 +47,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -62,6 +61,7 @@
import org.apache.hadoop.hive.ql.plan.MoveWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.TezWork;
+import org.apache.hive.parse.Node;
/**
* TezCompiler translates the operator plan into TezTasks.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/PTFInvocationSpec.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/PTFInvocationSpec.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/PTFInvocationSpec.java (working copy)
@@ -22,6 +22,7 @@
import java.util.List;
import org.apache.hadoop.hive.ql.exec.PTFUtils;
+import org.apache.hive.parse.ASTNode;
public class PTFInvocationSpec {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java (working copy)
@@ -28,7 +28,6 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
@@ -38,6 +37,7 @@
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.plan.TezWork;
import org.apache.hadoop.hive.ql.plan.TezWork.EdgeType;
+import org.apache.hive.parse.Node;
/**
* GenTezWork separates the operator tree into tez tasks.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java (working copy)
@@ -42,7 +42,7 @@
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hive.exception.ErrorMsg;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (working copy)
@@ -23,6 +23,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.parse.ASTNode;
/**
* SemanticAnalyzerFactory.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java (working copy)
@@ -7,9 +7,7 @@
import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.parse.SubQueryUtils.ISubQueryJoinInfo;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory.DefaultExprProcessor;
@@ -17,6 +15,10 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.parse.ASTNodeOrigin;
+import org.apache.hive.parse.Node;
public class QBSubQuery implements ISubQueryJoinInfo {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (working copy)
@@ -44,7 +44,6 @@
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryProperties;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.FetchTask;
@@ -59,9 +58,7 @@
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
@@ -80,6 +77,10 @@
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.parse.Node;
import com.google.common.annotations.VisibleForTesting;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java (working copy)
@@ -25,10 +25,10 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
+import org.apache.hive.parse.Node;
/**
* FileSinkProcessor handles addition of merge, move and stats tasks for filesinks
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TezWalker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TezWalker.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TezWalker.java (working copy)
@@ -22,7 +22,7 @@
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hive.parse.Node;
/**
* Walks the operator tree in DFS fashion.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
@@ -34,6 +33,8 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.parse.ASTNode;
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy)
@@ -53,7 +53,6 @@
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.Task;
@@ -66,9 +65,7 @@
import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
@@ -128,6 +125,10 @@
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.parse.Node;
/**
* DDLSemanticAnalyzer.
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java (working copy)
@@ -25,7 +25,7 @@
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
public class HiveSemanticAnalyzerHookContextImpl implements HiveSemanticAnalyzerHookContext {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java (working copy)
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.parse;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+import org.apache.hive.parse.ASTNode;
/**
* This class implements the context information that is used for typechecking
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java (working copy)
@@ -36,6 +36,7 @@
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.ql.metadata.CheckResult.PartitionResult;
+import org.apache.hive.exception.HiveException;
import org.apache.thrift.TException;
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java (working copy)
@@ -32,6 +32,7 @@
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hive.exception.HiveException;
/**
* DefaultStorageHandler is an implementation of {@link HiveStorageHandler}
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (working copy)
@@ -47,6 +47,7 @@
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hive.exception.HiveException;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TMemoryBuffer;
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (working copy)
@@ -29,11 +29,12 @@
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.DefaultHiveAuthorizerFactory;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.security.HiveAuthorizer;
/**
* General collection of helper functions.
@@ -47,8 +48,6 @@
public static final String RBRACKET = "]";
public static final String LBRACE = "{";
public static final String RBRACE = "}";
- public static final String LINE_SEP = System.getProperty("line.separator");
-
public static String escapeString(String str) {
int length = str.length();
StringBuilder escape = new StringBuilder(length + 16);
@@ -392,7 +391,7 @@
throws HiveException {
 Class<? extends HiveAuthorizerFactory> cls = conf.getClass(authorizationProviderConfKey.varname,
- DefaultHiveAuthorizerFactory.class, HiveAuthorizerFactory.class);
+ SQLStdHiveAuthorizerFactory.class, HiveAuthorizerFactory.class);
if(cls == null){
//should not happen as default value is set
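
The change above only swaps the default factory class; the surrounding lookup pattern (resolve a factory class from configuration, fall back to a standard implementation) is worth seeing in isolation. A self-contained sketch under assumed names, using java.util.Properties in place of HiveConf and a made-up AuthorizerFactory interface:

import java.util.Properties;

// Sketch only: AuthorizerFactory and StdFactory are stand-ins for
// HiveAuthorizerFactory and the default class resolved by conf.getClass(...).
public class FactoryLookupSketch {

    interface AuthorizerFactory {
        String name();
    }

    static class StdFactory implements AuthorizerFactory {
        public String name() { return "sql-standard"; }
    }

    static AuthorizerFactory loadFactory(Properties conf) throws Exception {
        String cls = conf.getProperty("hive.security.authorization.manager");
        if (cls == null) {
            // Mirrors falling back to SQLStdHiveAuthorizerFactory when the
            // config key is unset.
            return new StdFactory();
        }
        return (AuthorizerFactory) Class.forName(cls)
                .getDeclaredConstructor().newInstance();
    }

    public static void main(String[] args) throws Exception {
        System.out.println(loadFactory(new Properties()).name()); // sql-standard
    }
}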
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/InvalidTableException.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/InvalidTableException.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/InvalidTableException.java (working copy)
@@ -18,7 +18,8 @@
package org.apache.hadoop.hive.ql.metadata;
-import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
/**
* Generic exception class for Hive.
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java (working copy)
@@ -28,6 +28,7 @@
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormat;
+import org.apache.hive.exception.HiveException;
/**
* HiveStorageHandler defines a pluggable interface for adding
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Sample.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Sample.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Sample.java (working copy)
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.ql.metadata;
+import org.apache.hive.exception.HiveException;
+
/**
* A sample defines a subset of data based on sampling on a given dimension.
*
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveFatalException.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveFatalException.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveFatalException.java (working copy)
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.ql.metadata;
+import org.apache.hive.exception.HiveException;
+
public class HiveFatalException extends HiveException {
public HiveFatalException() {
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java (working copy)
@@ -26,6 +26,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hive.exception.HiveException;
/**
* A Hive Table Partition: is a fundamental storage unit within a Table. Currently, Hive does not support
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (working copy)
@@ -47,7 +47,6 @@
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
@@ -64,6 +63,8 @@
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
/**
* A Hive Table: is a fundamental unit of data in Hive that shares a common schema/DDL.
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/InputEstimator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/InputEstimator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/InputEstimator.java (working copy)
@@ -20,6 +20,7 @@
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hive.exception.HiveException;
// Plugin interface for storage handler which supports input estimation
public interface InputEstimator {
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java (working copy)
@@ -23,13 +23,14 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hive.exception.HiveException;
/**
* Interface to format table and index information. We can format it
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java (working copy)
@@ -38,10 +38,10 @@
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.exception.HiveException;
/**
* Format table and index information for human readability using
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java (working copy)
@@ -37,9 +37,9 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hive.exception.HiveException;
import org.codehaus.jackson.map.ObjectMapper;
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (working copy)
@@ -94,6 +94,7 @@
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
import org.apache.thrift.TException;
import com.google.common.collect.Sets;
Index: ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (working copy)
@@ -50,18 +50,17 @@
import org.apache.hadoop.hive.ql.history.HiveHistoryProxyHandler;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
-import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl;
import org.apache.hadoop.hive.ql.util.DosToUnix;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.security.HiveAuthorizer;
/**
* SessionState encapsulates common data associated with a session.
@@ -307,7 +306,20 @@
 // that would cause ClassNotFoundException otherwise
throw new RuntimeException(e);
}
- setupAuth(startSs);
+
+ if (HiveConf.getVar(startSs.getConf(), HiveConf.ConfVars.HIVE_EXECUTION_ENGINE)
+ .equals("tez")) {
+ try {
+ if (startSs.tezSessionState == null) {
+ startSs.tezSessionState = new TezSessionState();
+ }
+ startSs.tezSessionState.open(startSs.getSessionId(), startSs.conf);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ } else {
+ LOG.info("No Tez session required at this point. hive.execution.engine=mr.");
+ }
return startSs;
}
@@ -315,41 +327,38 @@
* Setup authentication and authorization plugins for this session.
* @param startSs
*/
- private static void setupAuth(SessionState startSs) {
+ private void setupAuth() {
+
+ if(authenticator != null){
+ //auth has been initialized
+ return;
+ }
+
try {
- startSs.authenticator = HiveUtils.getAuthenticator(
- startSs.getConf(),HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
- startSs.authorizer = HiveUtils.getAuthorizeProviderManager(
- startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
- startSs.authenticator, true);
+ authenticator = HiveUtils.getAuthenticator(
+ getConf(),HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
+ authorizer = HiveUtils.getAuthorizeProviderManager(
+ getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
+ authenticator, true);
- if(startSs.authorizer == null){
+ if(authorizer == null){
//if it was null, the new authorization plugin must be specified in config
HiveAuthorizerFactory authorizerFactory =
- HiveUtils.getAuthorizerFactory(startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
- startSs.authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(),
- startSs.getConf(), startSs.authenticator.getUserName());
+ HiveUtils.getAuthorizerFactory(getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
+ authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(),
+ getConf(), authenticator.getUserName());
}
else{
- startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs
- .getConf());
+ createTableGrants = CreateTableAutomaticGrant.create(getConf());
}
} catch (HiveException e) {
throw new RuntimeException(e);
}
- if (HiveConf.getVar(startSs.getConf(), HiveConf.ConfVars.HIVE_EXECUTION_ENGINE)
- .equals("tez")) {
- try {
- if (startSs.tezSessionState == null) {
- startSs.tezSessionState = new TezSessionState();
- }
- startSs.tezSessionState.open(startSs.getSessionId(), startSs.conf);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- } else {
- LOG.info("No Tez session required at this point. hive.execution.engine=mr.");
+ if(LOG.isDebugEnabled()){
+ Object authorizationClass = getAuthorizationMode() == AuthorizationMode.V1 ?
+ getAuthorizer() : getAuthorizerV2();
+ LOG.debug("Session is using authorization class " + authorizationClass.getClass());
}
return;
}
@@ -777,6 +786,7 @@
}
public HiveAuthorizationProvider getAuthorizer() {
+ setupAuth();
return authorizer;
}
@@ -785,10 +795,12 @@
}
public HiveAuthorizer getAuthorizerV2() {
+ setupAuth();
return authorizerV2;
}
public HiveAuthenticationProvider getAuthenticator() {
+ setupAuth();
return authenticator;
}
@@ -882,6 +894,7 @@
}
public AuthorizationMode getAuthorizationMode(){
+ setupAuth();
if(authorizer != null){
return AuthorizationMode.V1;
}else if(authorizerV2 != null){
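The SessionState hunks above make two behavioral changes: start() now opens a Tez session when hive.execution.engine=tez, and authentication/authorization setup becomes lazy and idempotent — each accessor (getAuthorizer, getAuthorizerV2, getAuthenticator, getAuthorizationMode) funnels through setupAuth(), which returns immediately once the plugins exist. A minimal sketch of that initialize-on-first-access pattern, using placeholder names (DemoSession, AuthPlugin) rather than Hive's real classes:

/**
 * Sketch only: demonstrates the lazy-init pattern introduced above.
 * DemoSession and AuthPlugin are illustrative stand-ins, not Hive API.
 */
public class DemoSession {

  static class AuthPlugin {
  }

  private AuthPlugin authenticator;

  public AuthPlugin getAuthenticator() {
    setupAuth();              // every accessor triggers lazy init
    return authenticator;
  }

  private void setupAuth() {
    if (authenticator != null) {
      return;                 // already initialized; later calls are no-ops
    }
    authenticator = new AuthPlugin();
  }

  public static void main(String[] args) {
    DemoSession s = new DemoSession();
    // Prints true: same instance both times, setupAuth() ran exactly once.
    System.out.println(s.getAuthenticator() == s.getAuthenticator());
  }
}

Like the patched setupAuth(), the sketch does no synchronization; the null check alone makes repeated calls cheap.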
Index: ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java (working copy)
@@ -26,9 +26,9 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
+import org.apache.hive.exception.HiveException;
public class CreateTableAutomaticGrant {
private Map<String, List<PrivilegeGrantInfo>> userGrants;
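
This hunk and those that follow are mechanical package moves: HiveException relocates from org.apache.hadoop.hive.ql.metadata to org.apache.hive.exception, ErrorMsg from org.apache.hadoop.hive.ql to org.apache.hive.exception, and Node from org.apache.hadoop.hive.ql.lib to org.apache.hive.parse. Only import lines change; a sketch of what that means for a hypothetical caller (not a file in this patch):

// Illustrative caller only. Before the patch it imported
// org.apache.hadoop.hive.ql.metadata.HiveException; afterwards the single
// changed line is this import — every throw/catch site is untouched.
import org.apache.hive.exception.HiveException;

public class ImportMigrationDemo {
  void failFast() throws HiveException {
    throw new HiveException("same type name, new package");
  }

  public static void main(String[] args) {
    try {
      new ImportMigrationDemo().failFast();
    } catch (HiveException e) {
      System.out.println("caught: " + e.getMessage());
    }
  }
}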
Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java (working copy)
@@ -25,9 +25,9 @@
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hive.exception.HiveException;
/**
* Implementation of a pre execute hook that updates the access
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java (working copy)
@@ -38,7 +38,6 @@
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ColumnStatsWork;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.session.SessionState;
@@ -52,6 +51,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
* ColumnStatsTask implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java (working copy)
@@ -18,10 +18,10 @@
package org.apache.hadoop.hive.ql.exec;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
// This function will not be used currently, since the function expressions
// change the void to the first matching argument
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorRef.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorRef.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorRef.java (working copy)
@@ -18,8 +18,8 @@
package org.apache.hadoop.hive.ql.exec;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* Returns evaluation result of other evaluator
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java (working copy)
@@ -26,7 +26,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PTFDesc;
@@ -43,6 +42,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
public class PTFOperator extends Operator<PTFDesc> implements Serializable {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/NodeUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/NodeUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/NodeUtils.java (working copy)
@@ -18,7 +18,7 @@
package org.apache.hadoop.hive.ql.exec;
-import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hive.parse.Node;
import java.util.Collection;
import java.util.HashSet;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
@@ -37,6 +36,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
* FunctionTask.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java (working copy)
@@ -27,13 +27,13 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* Join operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java (working copy)
@@ -34,7 +34,6 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.BucketMapJoinContext;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
@@ -50,6 +49,7 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.PriorityQueue;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* Sorted Merge Map Join Operator.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (working copy)
@@ -29,7 +29,6 @@
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -38,6 +37,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
* FetchTask implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java (working copy)
@@ -18,10 +18,10 @@
package org.apache.hadoop.hive.ql.exec;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* ExprNodeConstantEvaluator.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java (working copy)
@@ -22,8 +22,8 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.exception.HiveException;
/**
* SecureCmdDoAs - Helper class for setting parameters and env necessary for
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java (working copy)
@@ -20,11 +20,11 @@
import java.io.Serializable;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.HashTableDummyDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hive.exception.HiveException;
public class HashTableDummyOperator extends Operator<HashTableDummyDesc> implements Serializable {
private static final long serialVersionUID = 1L;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java (working copy)
@@ -33,10 +33,10 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.io.HiveKey;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.io.BinaryComparable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.WritableComparator;
+import org.apache.hive.exception.HiveException;
/**
* Stores binary key/value in sorted manner to get top-n key/value
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java (working copy)
@@ -18,13 +18,13 @@
package org.apache.hadoop.hive.ql.exec;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
+import org.apache.hive.exception.HiveException;
/**
* This evaluator gets the column from the row object.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java (working copy)
@@ -32,7 +32,6 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -44,6 +43,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* At runtime in Join, we output big keys in one table into one corresponding
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (working copy)
@@ -35,7 +35,6 @@
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.io.IOContext;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -57,6 +56,7 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
* Map operator. This triggers overall map side processing. This is a little
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java (working copy)
@@ -36,7 +36,6 @@
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinRowContainer;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.HashTableSinkDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
@@ -48,6 +47,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
public class HashTableSinkOperator extends TerminalOperator<HashTableSinkDesc> implements
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java (working copy)
@@ -25,13 +25,13 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinKey;
import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
public abstract class AbstractMapJoinOperator<T extends MapJoinDesc> extends CommonJoinOperator<T> implements
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java (working copy)
@@ -26,7 +26,6 @@
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinKey;
import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -45,6 +44,7 @@
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
public class JoinUtil {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java (working copy)
@@ -21,13 +21,13 @@
import java.util.HashMap;
import java.util.Map;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
+import org.apache.hive.exception.HiveException;
/**
* ExprNodeEvaluatorFactory.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java (working copy)
@@ -22,12 +22,12 @@
import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.ql.plan.CollectDesc;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* Buffers rows emitted by other operators.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (working copy)
@@ -32,14 +32,14 @@
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
/**
* Task implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java (working copy)
@@ -18,9 +18,9 @@
package org.apache.hadoop.hive.ql.exec;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* ExprNodeEvaluator.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java (working copy)
@@ -22,13 +22,13 @@
import java.util.List;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.LateralViewJoinDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* The lateral view join operator is used for FROM src LATERAL VIEW udtf()...
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (working copy)
@@ -40,7 +40,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
@@ -134,6 +133,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (working copy)
@@ -33,7 +33,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.StatsSetupConst;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.io.FSRecordWriter;
import org.apache.hadoop.hive.ql.io.FSRecordWriter.StatsProvidingRecordWriter;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -43,7 +42,6 @@
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.hive.ql.io.HivePartitioner;
import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
@@ -66,6 +64,8 @@
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
/**
* File Sink operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java (working copy)
@@ -20,7 +20,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
@@ -30,6 +29,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* ExprNodeGenericFuncEvaluator.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableLoader.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableLoader.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableLoader.java (working copy)
@@ -21,8 +21,8 @@
import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
+import org.apache.hive.exception.HiveException;
/**
* HashTableLoader is an interface used by MapJoinOperator used to load the hashtables
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java (working copy)
@@ -27,11 +27,11 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.DelimitedJSONSerDe;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* serialize row by user specified serde and call toString() to make string type result
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java (working copy)
@@ -26,13 +26,13 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.MuxDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
* MuxOperator is used in the Reduce side of MapReduce jobs optimized by Correlation Optimizer.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java (working copy)
@@ -21,9 +21,9 @@
import java.io.Serializable;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExtractDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hive.exception.HiveException;
/**
* Extract operator implementation Extracts a subobject and passes that on.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorHead.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorHead.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorHead.java (working copy)
@@ -18,8 +18,8 @@
package org.apache.hadoop.hive.ql.exec;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* Increases version number of each evaluations for correct caching
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/KeyWrapper.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/KeyWrapper.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/KeyWrapper.java (working copy)
@@ -18,8 +18,8 @@
package org.apache.hadoop.hive.ql.exec;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
public abstract class KeyWrapper {
public abstract void getNewKey(Object row, ObjectInspector rowInspector) throws HiveException;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java (working copy)
@@ -23,11 +23,11 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* Select operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java (working copy)
@@ -36,10 +36,10 @@
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hive.exception.HiveException;
/**
* ArchiveUtils.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (working copy)
@@ -38,7 +38,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
@@ -68,6 +67,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* GroupBy operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/PTFRowContainer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/PTFRowContainer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/PTFRowContainer.java (working copy)
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.FSRecordWriter;
import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.PTFDeserializer;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde.serdeConstants;
@@ -47,6 +46,7 @@
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileRecordReader;
import org.apache.hadoop.util.Progressable;
+import org.apache.hive.exception.HiveException;
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java (working copy)
@@ -34,7 +34,6 @@
import org.apache.hadoop.hive.ql.io.FSRecordWriter;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -47,6 +46,7 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* Simple persistent container for rows.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java (working copy)
@@ -25,10 +25,10 @@
import java.util.ConcurrentModificationException;
import java.util.Map;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
@SuppressWarnings("deprecation")
public class MapJoinTableContainerSerDe {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractRowContainer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractRowContainer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractRowContainer.java (working copy)
@@ -18,7 +18,7 @@
package org.apache.hadoop.hive.ql.exec.persistence;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
public abstract class AbstractRowContainer<Row> {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (working copy)
@@ -41,7 +41,6 @@
import org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader;
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
import org.apache.hadoop.hive.ql.io.HiveRecordReader;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.parse.SplitSample;
import org.apache.hadoop.hive.ql.plan.FetchWork;
@@ -67,6 +66,7 @@
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* FetchTask implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (working copy)
@@ -28,8 +28,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.StatsSetupConst;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -44,6 +42,8 @@
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
/**
* Table Scan Operator If the data is coming from the map-reduce framework, just
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (working copy)
@@ -31,13 +31,13 @@
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe;
import org.apache.hadoop.hive.ql.log.PerfLogger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* Map side Join operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java (working copy)
@@ -21,13 +21,13 @@
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* This Evaluator can evaluate s.f for s as both struct and list of struct. If s
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java (working copy)
@@ -26,7 +26,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.UDTFDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
@@ -34,6 +33,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* UDTFOperator.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java (working copy)
@@ -20,9 +20,9 @@
import java.io.Serializable;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ForwardDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hive.exception.HiveException;
/**
* Forward Operator Just forwards. Doesn't do anything itself.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (working copy)
@@ -34,8 +34,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ScriptDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.Deserializer;
@@ -49,6 +47,8 @@
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
/**
* ScriptOperator.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (working copy)
@@ -34,8 +34,6 @@
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec;
@@ -48,6 +46,8 @@
import org.apache.hadoop.hive.ql.stats.StatsFactory;
import org.apache.hadoop.hive.ql.stats.StatsPublisher;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
/**
* StatsTask implementation. StatsTask mainly deals with "collectable" stats. These are
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterLongColumnInList.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterLongColumnInList.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterLongColumnInList.java (working copy)
@@ -21,9 +21,9 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFLike;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import java.util.Arrays;
import java.util.List;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDoubleColumnInList.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDoubleColumnInList.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDoubleColumnInList.java (working copy)
@@ -22,9 +22,9 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFLike;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import java.util.Arrays;
import java.util.List;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColRegExpStringScalar.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColRegExpStringScalar.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColRegExpStringScalar.java (working copy)
@@ -18,8 +18,8 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import java.util.Arrays;
import java.util.List;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java (working copy)
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -72,6 +71,7 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
/**
* VectorExpressionWritableFactory helper class for generating VectorExpressionWritable objects.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java (working copy)
@@ -19,9 +19,9 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
/**
* Interface used to create Writable objects from vector expression primitives.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColLikeStringScalar.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColLikeStringScalar.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColLikeStringScalar.java (working copy)
@@ -18,9 +18,9 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFLike;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import java.util.Arrays;
import java.util.List;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringColumnInList.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringColumnInList.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringColumnInList.java (working copy)
@@ -22,9 +22,9 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFLike;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import java.util.Arrays;
import java.util.List;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColumnInList.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColumnInList.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColumnInList.java (working copy)
@@ -22,9 +22,9 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFLike;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import java.util.Arrays;
import java.util.List;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java (working copy)
@@ -22,12 +22,12 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* VectorUDAFCountStar. Vectorized implementation for COUNT(*) aggregates.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorAggregateExpression.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorAggregateExpression.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorAggregateExpression.java (working copy)
@@ -22,9 +22,9 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* Base class for aggregation expressions.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java (working copy)
@@ -23,12 +23,12 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java (working copy)
@@ -22,7 +22,6 @@
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.common.StatsSetupConst;
@@ -35,6 +34,7 @@
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
/**
* File Sink operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java (working copy)
@@ -26,9 +26,9 @@
import java.util.Map.Entry;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
/**
* A VectorizedRowBatch is a set of rows, organized with each column
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java (working copy)
@@ -20,8 +20,8 @@
import org.apache.hadoop.hive.ql.exec.MapOperator;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
public class VectorMapOperator extends MapOperator {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java (working copy)
@@ -27,13 +27,13 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
* Select operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java (working copy)
@@ -19,7 +19,7 @@
package org.apache.hadoop.hive.ql.exec.vector;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
/**
* Describes a vector expression and encapsulates the {@link Mode}, number of arguments,
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java (working copy)
@@ -34,13 +34,13 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.SMBJoinDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* VectorSMBJoinOperator.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java (working copy)
@@ -22,7 +22,6 @@
import java.sql.Timestamp;
import java.util.List;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
@@ -41,6 +40,7 @@
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
/**
* VectorizedColumnarSerDe is used by Vectorized query execution engine
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (working copy)
@@ -71,7 +71,6 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColumnNotBetween;
import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFArgDesc;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
@@ -90,6 +89,7 @@
import org.apache.hadoop.hive.ql.udf.generic.*;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* Context class for vectorization execution.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java (working copy)
@@ -19,9 +19,9 @@
package org.apache.hadoop.hive.ql.exec.vector;
import org.apache.hadoop.hive.ql.exec.LimitOperator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.LimitDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hive.exception.HiveException;
/**
* Limit operator implementation. Limits the number of rows to be passed on.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java (working copy)
@@ -23,7 +23,6 @@
import java.util.List;
import java.util.Map;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -37,6 +36,7 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* This class is used as a static factory for VectorColumnAssign.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java (working copy)
@@ -21,7 +21,6 @@
import java.sql.Timestamp;
import java.util.List;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -36,6 +35,7 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
public class VectorizedBatchUtil {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java (working copy)
@@ -22,9 +22,9 @@
import org.apache.hadoop.hive.ql.exec.KeyWrapper;
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* A hash map key wrapper for vectorized processing.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java (working copy)
@@ -22,8 +22,8 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hive.exception.HiveException;
/**
* Class for handling vectorized hash map key wrappers. It evaluates the key columns in a
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java (working copy)
@@ -23,11 +23,11 @@
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hive.exception.HiveException;
/**
* Filter operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssign.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssign.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssign.java (working copy)
@@ -18,7 +18,7 @@
package org.apache.hadoop.hive.ql.exec.vector;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
public interface VectorColumnAssign {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java (working copy)
@@ -37,7 +37,6 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
@@ -46,6 +45,7 @@
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
* Vectorized GROUP BY operator implementation. Consumes the vectorized input and
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java (working copy)
@@ -30,7 +30,6 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.IOPrepareCache;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.hive.serde2.Deserializer;
@@ -43,6 +42,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hive.exception.HiveException;
/**
* Context for Vectorized row batch. This class does eager deserialization of row data using serde
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java (working copy)
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
import org.apache.hadoop.hive.ql.io.HiveKey;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -45,6 +44,7 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
// import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
public class VectorReduceSinkOperator extends ReduceSinkOperator {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java (working copy)
@@ -34,13 +34,13 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* The vectorized version of the MapJoinOperator.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java (working copy)
@@ -24,13 +24,13 @@
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hive.exception.HiveException;
/**
* Descriptor for function argument.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -38,6 +37,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* A VectorUDFAdaptor is a vectorized expression for invoking a custom
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java (working copy)
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator;
import org.apache.hadoop.hive.ql.exec.vector.VectorSMBMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.CollectDesc;
import org.apache.hadoop.hive.ql.plan.DemuxDesc;
import org.apache.hadoop.hive.ql.plan.DummyStoreDesc;
@@ -58,6 +57,7 @@
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.ql.plan.UDTFDesc;
import org.apache.hadoop.hive.ql.plan.UnionDesc;
+import org.apache.hive.exception.HiveException;
/**
* OperatorFactory.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DummyStoreOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DummyStoreOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DummyStoreOperator.java (working copy)
@@ -21,12 +21,12 @@
import java.io.Serializable;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.DummyStoreDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
+import org.apache.hive.exception.HiveException;
/**
* For SortMerge joins, this is a dummy operator, which stores the row for the
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (working copy)
@@ -34,8 +34,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -48,6 +46,8 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.Node;
/**
* Base operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java (working copy)
@@ -23,12 +23,12 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.IOContext;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* Filter operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java (working copy)
@@ -23,7 +23,6 @@
import java.util.List;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.UnionDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver;
@@ -31,6 +30,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* Union Operator just forwards. Doesn't do anything itself.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java (working copy)
@@ -32,7 +32,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.HiveKey;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.io.BytesWritable;
@@ -42,6 +41,7 @@
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hive.exception.HiveException;
public class PartitionKeySampler implements OutputCollector {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java (working copy)
@@ -28,7 +28,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.HiveKey;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
@@ -48,6 +47,7 @@
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hive.exception.HiveException;
/**
* Reduce Sink Operator sends output to the reduce stage.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/PTFUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/PTFUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/PTFUtils.java (working copy)
@@ -41,10 +41,10 @@
import org.antlr.runtime.tree.BaseTree;
import org.antlr.runtime.tree.CommonTree;
import org.apache.hadoop.hive.ql.exec.Utilities.EnumDelegate;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.Direction;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.parse.ASTNode;
public class PTFUtils {
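Note: alongside the exception move, the Operator.java and PTFUtils.java hunks above relocate the parse-tree types: org.apache.hadoop.hive.ql.lib.Node becomes org.apache.hive.parse.Node, and org.apache.hadoop.hive.ql.parse.ASTNode becomes org.apache.hive.parse.ASTNode. The patch shows only the import swaps, so the sketch below assumes the relocated Node interface keeps the shape of the historical ql.lib version.

package org.apache.hive.parse;

import java.util.List;

/**
 * Hedged sketch: the two methods below mirror the historical
 * org.apache.hadoop.hive.ql.lib.Node interface and are an assumption here.
 */
public interface Node {

  /** Returns the children of this node, in walk order; null for leaves. */
  List<? extends Node> getChildren();

  /** Returns the vertex name used by graph walkers and rule matching. */
  String getName();
}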
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (working copy)
@@ -102,7 +102,6 @@
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
@@ -126,7 +125,6 @@
import org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateMapper;
import org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateWork;
import org.apache.hadoop.hive.ql.log.PerfLogger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.InputEstimator;
@@ -174,6 +172,8 @@
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Shell;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java (working copy)
@@ -23,11 +23,11 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ListSinkDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* For fetch task with operator tree, row read from FetchOperator is processed via operator tree
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionException.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionException.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionException.java (working copy)
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hive.ql.exec.mapjoin;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewForwardOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewForwardOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewForwardOperator.java (working copy)
@@ -18,9 +18,9 @@
package org.apache.hadoop.hive.ql.exec;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.LateralViewForwardDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hive.exception.HiveException;
/**
* LateralViewForwardOperator. This operator sits at the head of the operator
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/KeyWrapperFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/KeyWrapperFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/KeyWrapperFactory.java (working copy)
@@ -20,7 +20,6 @@
import java.util.Arrays;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectsEqualComparer;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -29,6 +28,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
public class KeyWrapperFactory {
public KeyWrapperFactory(ExprNodeEvaluator[] keyFields, ObjectInspector[] keyObjectInspectors,
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java (working copy)
@@ -32,10 +32,10 @@
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinRowContainer;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
import org.apache.tez.runtime.api.LogicalInput;
import org.apache.tez.runtime.library.api.KeyValueReader;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java (working copy)
@@ -39,7 +39,6 @@
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.log.PerfLogger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.BaseWork;
import org.apache.hadoop.hive.ql.plan.TezWork;
import org.apache.hadoop.hive.ql.plan.TezWork.EdgeType;
@@ -48,6 +47,7 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hive.exception.HiveException;
import org.apache.tez.client.TezSession;
import org.apache.tez.common.counters.TezCounters;
import org.apache.tez.dag.api.DAG;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java (working copy)
@@ -37,7 +37,6 @@
import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
import org.apache.hadoop.hive.ql.exec.tez.tools.InputMerger;
import org.apache.hadoop.hive.ql.log.PerfLogger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -52,6 +51,7 @@
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
import org.apache.tez.mapreduce.processor.MRTaskReporter;
import org.apache.tez.runtime.api.LogicalInput;
import org.apache.tez.runtime.library.api.KeyValuesReader;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java (working copy)
@@ -35,8 +35,8 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hive.exception.ErrorMsg;
import org.apache.tez.client.AMConfiguration;
import org.apache.tez.client.TezSession;
import org.apache.tez.client.TezSessionConfiguration;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java (working copy)
@@ -37,7 +37,6 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
import org.apache.hadoop.hive.ql.exec.mr.ExecReducer;
@@ -45,7 +44,6 @@
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
import org.apache.hadoop.hive.ql.io.HiveKey;
import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.BaseWork;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
@@ -67,6 +65,8 @@
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
import org.apache.tez.dag.api.Edge;
import org.apache.tez.dag.api.EdgeProperty;
import org.apache.tez.dag.api.EdgeProperty.DataMovementType;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java (working copy)
@@ -27,12 +27,12 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.exec.persistence.PTFRowContainer;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/*
* Represents a collection of rows that is acted upon by a TableFunction or a WindowFunction.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (working copy)
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.persistence.AbstractRowContainer;
import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -41,6 +40,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
* Join operator implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy)
@@ -80,7 +80,6 @@
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.ArchiveUtils.PartSpecInfo;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -96,7 +95,6 @@
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
import org.apache.hadoop.hive.ql.metadata.CheckResult;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveMetaStoreChecker;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
@@ -155,13 +153,8 @@
import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
import org.apache.hadoop.hive.ql.plan.api.StageType;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
@@ -175,6 +168,15 @@
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.ToolRunner;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.security.HiveAuthorizer;
+import org.apache.hive.security.HivePrincipal;
+import org.apache.hive.security.HivePrivilege;
+import org.apache.hive.security.HivePrivilegeInfo;
+import org.apache.hive.security.HivePrivilegeObject;
+import org.apache.hive.security.HivePrincipal.HivePrincipalType;
+import org.apache.hive.security.HivePrivilegeObject.HivePrivilegeObjectType;
import org.stringtemplate.v4.ST;
/**
@@ -541,6 +543,10 @@
}
private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
+
+ if(SessionState.get().isAuthorizationModeV2()){
+ return showGrantsV2(showGrantDesc);
+ }
StringBuilder builder = new StringBuilder();
try {
PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc();
@@ -639,6 +645,44 @@
return 0;
}
+ private int showGrantsV2(ShowGrantDesc showGrantDesc) throws HiveException {
+ HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+ StringBuilder builder = new StringBuilder();
+ try {
+ List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges(
+ getHivePrincipal(showGrantDesc.getPrincipalDesc()),
+ getHivePrivilegeObject(showGrantDesc.getHiveObj())
+ );
+ for(HivePrivilegeInfo privInfo : privInfos){
+ HivePrincipal principal = privInfo.getPrincipal();
+ HivePrivilegeObject privObj = privInfo.getObject();
+ HivePrivilege priv = privInfo.getPrivilege();
+
+ PrivilegeGrantInfo grantInfo =
+ AuthorizationUtils.getThriftPrivilegeGrantInfo(priv, privInfo.getGrantorPrincipal(),
+ privInfo.isGrantOption());
+
+ //only grantInfo is used
+ HiveObjectPrivilege thriftObjectPriv = new HiveObjectPrivilege(null, null, null, grantInfo);
+ List<HiveObjectPrivilege> privList = new ArrayList<HiveObjectPrivilege>();
+ privList.add(thriftObjectPriv);
+ writeGrantInfo(builder,
+ AuthorizationUtils.getThriftPrincipalType(principal.getType()),
+ principal.getName(),
+ privObj.getDbname(),
+ privObj.getTableviewname(),
+ null,
+ null,
+ privList
+ );
+ }
+ writeToFile(builder.toString(), showGrantDesc.getResFile());
+ } catch (IOException e) {
+ throw new HiveException("Error in show grant statement", e);
+ }
+ return 0;
+ }
+
private static void sortPrivileges(List<HiveObjectPrivilege> privileges) {
Collections.sort(privileges, new Comparator<HiveObjectPrivilege>() {
@@ -823,6 +867,24 @@
return new HivePrivilegeObject(getPrivObjectType(privSubjectDesc), dbTable[0], dbTable[1]);
}
+ private HivePrincipalType getHivePrincipalType(PrincipalType type) throws HiveException {
+ if(type == null){
+ return null;
+ }
+
+ switch(type){
+ case USER:
+ return HivePrincipalType.USER;
+ case ROLE:
+ return HivePrincipalType.ROLE;
+ case GROUP:
+ throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
+ default:
+ //should not happen as we take care of all existing types
+ throw new HiveException("Unsupported authorization type specified");
+ }
+ }
+
private HivePrivilegeObjectType getPrivObjectType(PrivilegeObjectDesc privSubjectDesc) {
//TODO: This needs to change to support view once view grant/revoke is supported as
// part of HIVE-6181
@@ -841,24 +903,14 @@
private List<HivePrincipal> getHivePrincipals(List<PrincipalDesc> principals) throws HiveException {
ArrayList<HivePrincipal> hivePrincipals = new ArrayList<HivePrincipal>();
for(PrincipalDesc principal : principals){
- hivePrincipals.add(
- new HivePrincipal(principal.getName(), getHivePrincipalType(principal.getType())));
+ hivePrincipals.add(getHivePrincipal(principal));
}
return hivePrincipals;
}
- private HivePrincipalType getHivePrincipalType(PrincipalType type) throws HiveException {
- switch(type){
- case USER:
- return HivePrincipalType.USER;
- case ROLE:
- return HivePrincipalType.ROLE;
- case GROUP:
- throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
- default:
- //should not happen as we take care of all existing types
- throw new HiveException("Unsupported authorization type specified");
- }
+ private HivePrincipal getHivePrincipal(PrincipalDesc principal) throws HiveException {
+ return new HivePrincipal(principal.getName(),
+ AuthorizationUtils.getHivePrincipalType(principal.getType()));
}
private void throwNotFound(String objType, String objName) throws HiveException {
@@ -915,7 +967,7 @@
LOG.info("role ddl exception: " + stringifyException(e));
return 1;
} finally {
- IOUtils.closeStream((FSDataOutputStream) outStream);
+ IOUtils.closeStream(outStream);
}
return 0;
@@ -945,6 +997,7 @@
throw new HiveException("Unkown role operation "
+ operation.getOperationName());
}
+
return 0;
}
@@ -955,10 +1008,10 @@
* @throws IOException
*/
private void writeListToFile(List<String> entries, String resFile) throws IOException {
- StringBuilder sb = new StringBuilder(entries.size()*2);
+ StringBuilder sb = new StringBuilder();
for(String entry : entries){
sb.append(entry);
- sb.append(terminator);
+ sb.append((char)terminator);
}
writeToFile(sb.toString(), resFile);
}
@@ -2191,7 +2244,7 @@
} catch (Exception e) {
throw new HiveException(e);
} finally {
- IOUtils.closeStream((FSDataOutputStream) outStream);
+ IOUtils.closeStream(outStream);
}
return 0;
@@ -2249,7 +2302,7 @@
} catch (Exception e) {
throw new HiveException(e.toString());
} finally {
- IOUtils.closeStream((FSDataOutputStream) outStream);
+ IOUtils.closeStream(outStream);
}
return 0;
@@ -2368,7 +2421,7 @@
} catch (IOException e) {
throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
} finally {
- IOUtils.closeStream((FSDataOutputStream) outStream);
+ IOUtils.closeStream(outStream);
}
return 0;
}
@@ -2420,7 +2473,7 @@
} catch (Exception e) {
throw new HiveException(e);
} finally {
- IOUtils.closeStream((FSDataOutputStream) outStream);
+ IOUtils.closeStream(outStream);
}
return 0;
}
@@ -2512,7 +2565,7 @@
} catch (Exception e) {
throw new HiveException(e.toString());
} finally {
- IOUtils.closeStream((FSDataOutputStream) outStream);
+ IOUtils.closeStream(outStream);
}
return 0;
}
@@ -2753,7 +2806,7 @@
} catch (Exception e) {
throw new HiveException(e);
} finally {
- IOUtils.closeStream((FSDataOutputStream) outStream);
+ IOUtils.closeStream(outStream);
}
return 0;
}
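Note: two behavioral points in the DDLTask hunks above, beyond the import moves. First, showGrants() now short-circuits to showGrantsV2() when the session runs in V2 authorization mode; the V2 path converts each HivePrivilegeInfo back into a Thrift PrivilegeGrantInfo solely so the existing writeGrantInfo() formatter can be reused. Second, the writeListToFile() fix casts terminator to char before appending, because StringBuilder.append(int) appends the decimal digits of the number while append(char) appends the character itself; the redundant (FSDataOutputStream) casts before IOUtils.closeStream() are likewise dropped. The principal-type conversion that DDLTask now delegates to AuthorizationUtils is not shown in this patch; the sketch below assumes it mirrors the private DDLTask version in the hunks above, null check included.

package org.apache.hadoop.hive.ql.security.authorization;

import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hive.exception.ErrorMsg;
import org.apache.hive.exception.HiveException;
import org.apache.hive.security.HivePrincipal.HivePrincipalType;

// Hedged sketch: only the calls to getHivePrincipalType, getThriftPrincipalType
// and getThriftPrivilegeGrantInfo are confirmed by this patch; this body is an
// assumption mirroring the private DDLTask version shown above.
public class AuthorizationUtils {

  public static HivePrincipalType getHivePrincipalType(PrincipalType type)
      throws HiveException {
    if (type == null) {
      return null;
    }
    switch (type) {
    case USER:
      return HivePrincipalType.USER;
    case ROLE:
      return HivePrincipalType.ROLE;
    case GROUP:
      // group principals are not supported by the V2 authorization interface
      throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
    default:
      // should not happen; all existing PrincipalType values are handled above
      throw new HiveException("Unsupported authorization type specified");
    }
  }
}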
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (working copy)
@@ -50,7 +50,6 @@
import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObj;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.BucketCol;
@@ -66,6 +65,7 @@
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
* MoveTask implementation.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java (working copy)
@@ -21,9 +21,9 @@
import java.io.Serializable;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.LimitDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hive.exception.HiveException;
/**
* Limit operator implementation. Limits the number of rows to be passed on.
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java (working copy)
@@ -31,7 +31,6 @@
import java.util.regex.Pattern;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.errors.ErrorAndSolution;
import org.apache.hadoop.hive.ql.exec.errors.TaskLogProcessor;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -39,6 +38,7 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TaskCompletionEvent;
+import org.apache.hive.exception.ErrorMsg;
/**
* JobDebugger takes a RunningJob that has failed and grabs the top 4 failing
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java (working copy)
@@ -46,7 +46,6 @@
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.FetchOperator;
import org.apache.hadoop.hive.ql.exec.HiveTotalOrderPartitioner;
@@ -60,7 +59,6 @@
import org.apache.hadoop.hive.ql.io.HiveKey;
import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
import org.apache.hadoop.hive.ql.io.IOPrepareCache;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
@@ -83,6 +81,8 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Partitioner;
import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
import org.apache.log4j.Appender;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.FileAppender;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java (working copy)
@@ -31,9 +31,9 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.exception.HiveException;
/**
* HashTableLoader for MR loads the hashtable for MapJoins from local disk (hashtables
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java (working copy)
@@ -55,7 +55,6 @@
import org.apache.hadoop.hive.ql.exec.mapjoin.MapJoinMemoryExhaustionException;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe;
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.BucketMapJoinContext;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
@@ -70,6 +69,7 @@
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* MapredLocalTask represents any local work (i.e.: client side work) that hive needs to
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java (working copy)
@@ -35,7 +35,6 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapper.reportStats;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde2.Deserializer;
@@ -52,6 +51,7 @@
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
* ExecReducer is the generic Reducer class for Hive. Together with ExecMapper it is
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.DemuxDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* DemuxOperator is an operator used by MapReduce Jobs optimized by
Index: ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (working copy)
@@ -34,7 +34,6 @@
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
@@ -76,6 +75,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
import org.apache.hadoop.io.BytesWritable;
+import org.apache.hive.exception.HiveException;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
Index: ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java (working copy)
@@ -14,11 +14,11 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
+import org.apache.hive.exception.HiveException;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.types.Path;
Index: ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java (working copy)
@@ -33,7 +33,6 @@
import org.apache.hadoop.hive.ql.io.InputFormatChecker;
import org.apache.hadoop.hive.ql.io.orc.Reader.FileMetaInfo;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileSplit;
@@ -41,6 +40,7 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
+import org.apache.hive.exception.HiveException;
/**
* A MapReduce/Hive input format for ORC files.
Index: ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcSerde.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcSerde.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcSerde.java (working copy)
@@ -20,11 +20,11 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
/**
* A serde class for ORC.
Index: ql/src/java/org/apache/hadoop/hive/ql/io/VectorizedRCFileRecordReader.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/VectorizedRCFileRecordReader.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/VectorizedRCFileRecordReader.java (working copy)
@@ -32,13 +32,13 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
import org.apache.hadoop.hive.ql.io.RCFile.KeyBuffer;
import org.apache.hadoop.hive.ql.io.RCFile.Reader;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hive.exception.HiveException;
/**
* RCFileRecordReader.
Index: ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (working copy)
@@ -35,7 +35,6 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
@@ -53,6 +52,7 @@
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* An util class for various Hive file format tasks.
Index: ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java (working copy)
@@ -28,7 +28,6 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.RCFile;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.shims.CombineHiveKey;
@@ -39,6 +38,7 @@
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
+import org.apache.hive.exception.HiveException;
@SuppressWarnings("deprecation")
public class RCFileMergeMapper extends MapReduceBase implements
Index: ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java (working copy)
@@ -30,7 +30,6 @@
import org.apache.hadoop.hive.ql.io.RCFile;
import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileKeyBufferWrapper;
import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileValueBufferWrapper;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.shims.CombineHiveKey;
@@ -41,6 +40,7 @@
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
+import org.apache.hive.exception.HiveException;
@SuppressWarnings("deprecation")
public class ColumnTruncateMapper extends MapReduceBase implements
Index: ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java (working copy)
@@ -34,7 +34,6 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -43,7 +42,6 @@
import org.apache.hadoop.hive.ql.exec.mr.Throttle;
import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.session.SessionState;
@@ -58,6 +56,8 @@
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
import org.apache.log4j.Appender;
import org.apache.log4j.FileAppender;
import org.apache.log4j.LogManager;
Index: ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java (working copy)
@@ -26,12 +26,10 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.RCFile.KeyBuffer;
import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileKeyBufferWrapper;
import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileValueBufferWrapper;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.stats.CounterStatsPublisher;
import org.apache.hadoop.hive.ql.stats.StatsFactory;
import org.apache.hadoop.hive.ql.stats.StatsPublisher;
@@ -41,6 +39,8 @@
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
/**
*
Index: ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexHandler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexHandler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexHandler.java (working copy)
@@ -27,10 +27,10 @@
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hive.exception.HiveException;
/**
* HiveIndexHandler defines a pluggable interface for adding new index handlers
Index: ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java (working copy)
@@ -32,13 +32,13 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.LineRecordReader.LineReader;
+import org.apache.hive.exception.HiveException;
/**
* HiveIndexResult parses the input stream from an index query
Index: ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java (working copy)
@@ -38,7 +38,6 @@
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
import org.apache.hadoop.hive.ql.io.IOPrepareCache;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapred.FileInputFormat;
@@ -46,6 +45,7 @@
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hive.exception.HiveException;
/**
* Input format for doing queries that use indexes.
Index: ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java (working copy)
@@ -45,7 +45,6 @@
import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
import org.apache.hadoop.hive.ql.index.TableBasedIndexHandler;
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler.DecomposedPredicate;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -64,6 +63,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
+import org.apache.hive.exception.HiveException;
public class CompactIndexHandler extends TableBasedIndexHandler {
Index: ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java (working copy)
@@ -42,7 +42,6 @@
import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
import org.apache.hadoop.hive.ql.index.TableBasedIndexHandler;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
@@ -55,6 +54,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
+import org.apache.hive.exception.HiveException;
/**
* Index handler for the bitmap index. Bitmap index uses an EWAH-compressed
Index: ql/src/java/org/apache/hadoop/hive/ql/index/AggregateIndexHandler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/index/AggregateIndexHandler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/index/AggregateIndexHandler.java (working copy)
@@ -32,11 +32,11 @@
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.index.compact.CompactIndexHandler;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.optimizer.IndexUtils;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hive.exception.HiveException;
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java (working copy)
@@ -31,12 +31,12 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hive.exception.HiveException;
/**
* Index handler for indexes that use tables to store indexes.
Index: ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java (working copy)
@@ -31,7 +31,6 @@
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
@@ -45,6 +44,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
+import org.apache.hive.parse.Node;
/**
* IndexPredicateAnalyzer decomposes predicates, separating the parts
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java (working copy)
@@ -39,12 +39,12 @@
import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
import org.apache.hadoop.hive.metastore.events.PreEventContext;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider;
+import org.apache.hive.exception.HiveException;
/**
* AuthorizationPreEventListener : A MetaStorePreEventListener that
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java (working copy)
@@ -39,9 +39,9 @@
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hive.exception.HiveException;
/**
* StorageBasedAuthorizationProvider is an implementation of
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java (working copy)
@@ -20,7 +20,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
public class DefaultHiveMetastoreAuthorizationProvider extends BitSetCheckedAuthorizationProvider
implements HiveMetastoreAuthorizationProvider {
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (revision 0)
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.ql.hooks.Entity.Type;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.security.HivePrincipal;
+import org.apache.hive.security.HivePrivilege;
+import org.apache.hive.security.HivePrivilegeObject;
+import org.apache.hive.security.HivePrincipal.HivePrincipalType;
+import org.apache.hive.security.HivePrivilegeObject.HivePrivilegeObjectType;
+
+/**
+ * Utility code shared by Hive internal code and the SQL standard authorization plugin implementation
+ */
+@LimitedPrivate(value = { "Sql standard authorization plugin" })
+public class AuthorizationUtils {
+
+ /**
+ * Convert thrift principal type to authorization plugin principal type
+ * @param type - thrift principal type
+ * @return
+ * @throws HiveException
+ */
+ public static HivePrincipalType getHivePrincipalType(PrincipalType type) throws HiveException {
+ switch(type){
+ case USER:
+ return HivePrincipalType.USER;
+ case ROLE:
+ return HivePrincipalType.ROLE;
+ case GROUP:
+ throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
+ default:
+ //should not happen as we take care of all existing types
+ throw new HiveException("Unsupported authorization type specified");
+ }
+ }
+
+
+ /**
+ * Convert thrift object type to hive authorization plugin object type
+ * @param type - thrift object type
+ * @return
+ */
+ public static HivePrivilegeObjectType getHivePrivilegeObjectType(Type type) {
+ switch(type){
+ case DATABASE:
+ return HivePrivilegeObjectType.DATABASE;
+ case TABLE:
+ return HivePrivilegeObjectType.TABLE;
+ case LOCAL_DIR:
+ case DFS_DIR:
+ return HivePrivilegeObjectType.URI;
+ case PARTITION:
+ case DUMMYPARTITION: //need to determine if a different type is needed for dummy partitions
+ return HivePrivilegeObjectType.PARTITION;
+ default:
+ return null;
+ }
+ }
+
+
+ /**
+ * Convert authorization plugin principal type to thrift principal type
+ * @param type
+ * @return
+ * @throws HiveException
+ */
+ public static PrincipalType getThriftPrincipalType(HivePrincipalType type)
+ throws HiveException {
+ if(type == null){
+ return null;
+ }
+ switch(type){
+ case USER:
+ return PrincipalType.USER;
+ case ROLE:
+ return PrincipalType.ROLE;
+ default:
+ throw new HiveException("Invalid principal type");
+ }
+ }
+
+
+ /**
+ * Get thrift privilege grant info
+ * @param privilege
+ * @param grantorPrincipal
+ * @param grantOption
+ * @return
+ * @throws HiveException
+ */
+ public static PrivilegeGrantInfo getThriftPrivilegeGrantInfo(HivePrivilege privilege,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveException {
+ return new PrivilegeGrantInfo(privilege.getName(), 0 /* time gets added by server */,
+ grantorPrincipal.getName(), getThriftPrincipalType(grantorPrincipal.getType()), grantOption);
+ }
+
+
+ /**
+ * Convert plugin privilege object type to thrift type
+ * @param type
+ * @return
+ * @throws HiveException
+ */
+ public static HiveObjectType getThriftHiveObjType(HivePrivilegeObjectType type) throws HiveException {
+ switch(type){
+ case DATABASE:
+ return HiveObjectType.DATABASE;
+ case TABLE:
+ return HiveObjectType.TABLE;
+ case PARTITION:
+ return HiveObjectType.PARTITION;
+ default:
+ throw new HiveException("Unsupported type");
+ }
+ }
+
+
+ /**
+ * Convert plugin HivePrivilegeObject to thrift HiveObjectRef
+ * @param privObj
+ * @return
+ * @throws HiveException
+ */
+ public static HiveObjectRef getThriftHiveObjectRef(HivePrivilegeObject privObj) throws HiveException {
+ HiveObjectType objType = getThriftHiveObjType(privObj.getType());
+ return new HiveObjectRef(objType, privObj.getDbname(), privObj.getTableviewname(), null, null);
+ }
+
+
+}
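
A usage sketch (illustration only, not part of the patch) of the conversion helpers above: building a plugin-level privilege object and converting it to its thrift form. The database and table names are hypothetical.

    // Hypothetical names; types come from org.apache.hive.security
    // and org.apache.hadoop.hive.metastore.api.
    HivePrivilegeObject privObj = new HivePrivilegeObject(
        HivePrivilegeObjectType.TABLE, "mydb", "mytable");
    HiveObjectRef thriftRef = AuthorizationUtils.getThriftHiveObjectRef(privObj);
    assert thriftRef.getObjectType() == HiveObjectType.TABLE;
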
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java (working copy)
@@ -31,8 +31,8 @@
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hive.exception.HiveException;
import org.apache.thrift.TException;
public abstract class HiveAuthorizationProviderBase implements
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java (working copy)
@@ -24,10 +24,10 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hive.exception.HiveException;
/**
* Hive's pluggable authorization provider interface
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java (working copy)
@@ -21,7 +21,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
public class DefaultHiveAuthorizationProvider extends
BitSetCheckedAuthorizationProvider {
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java (working copy)
@@ -29,9 +29,9 @@
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hive.exception.HiveException;
public abstract class BitSetCheckedAuthorizationProvider extends
HiveAuthorizationProviderBase {
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java (working copy)
@@ -20,6 +20,7 @@
import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.security.HiveAuthorizer;
/**
* Implementation of this interface specified through hive configuration will be used to
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactoryImpl.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactoryImpl.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactoryImpl.java (working copy)
@@ -24,7 +24,7 @@
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
/**
* Private implementation that returns an instance of IMetaStoreClient
*/
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java (revision 0)
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
+import org.apache.hive.security.HiveAuthorizer;
+import org.apache.hive.security.HiveAuthorizerImpl;
+
+@Private
+public class SQLStdHiveAuthorizerFactory implements HiveAuthorizerFactory{
+ @Override
+ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+ HiveConf conf, String hiveCurrentUser) {
+ return new HiveAuthorizerImpl(
+ new SQLStdHiveAccessController(metastoreClientFactory, conf, hiveCurrentUser),
+ new SQLStdHiveAuthorizationValidator()
+ );
+ }
+}
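
How the factory gets selected (a sketch, assuming the existing hive.security.authorization.manager property is used to pick the authorizer factory class and that the V2 authorization code path is enabled):

    // Point the authorization manager at the new factory.
    HiveConf conf = new HiveConf();
    conf.set("hive.security.authorization.manager",
        SQLStdHiveAuthorizerFactory.class.getName());
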
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java (revision 0)
@@ -0,0 +1,313 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
+import org.apache.hive.exception.HiveAuthorizationPluginException;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.security.HiveAccessController;
+import org.apache.hive.security.HivePrincipal;
+import org.apache.hive.security.HivePrivilege;
+import org.apache.hive.security.HivePrivilegeInfo;
+import org.apache.hive.security.HivePrivilegeObject;
+import org.apache.hive.security.HivePrincipal.HivePrincipalType;
+import org.apache.hive.security.HivePrivilegeObject.HivePrivilegeObjectType;
+
+
+/**
+ * Implements functionality of access control statements for sql standard based authorization
+ */
+@Private
+public class SQLStdHiveAccessController implements HiveAccessController {
+
+ private HiveMetastoreClientFactory metastoreClientFactory;
+
+
+ SQLStdHiveAccessController(HiveMetastoreClientFactory metastoreClientFactory,
+ HiveConf conf, String hiveCurrentUser){
+ this.metastoreClientFactory = metastoreClientFactory;
+ }
+
+
+ @Override
+ public void grantPrivileges(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthorizationPluginException {
+
+ PrivilegeBag privBag =
+ getThriftPrivilegesBag(hivePrincipals, hivePrivileges, hivePrivObject, grantorPrincipal,
+ grantOption);
+ try {
+ metastoreClientFactory.getHiveMetastoreClient().grant_privileges(privBag);
+ } catch (Exception e) {
+ throw new HiveAuthorizationPluginException("Error granting privileges", e);
+ }
+ }
+
+ /**
+ * Create thrift privileges bag
+ * @param hivePrincipals
+ * @param hivePrivileges
+ * @param hivePrivObject
+ * @param grantorPrincipal
+ * @param grantOption
+ * @return
+ * @throws HiveAuthorizationPluginException
+ */
+ private PrivilegeBag getThriftPrivilegesBag(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthorizationPluginException {
+ HiveObjectRef privObj = getThriftHiveObjectRef(hivePrivObject);
+ PrivilegeBag privBag = new PrivilegeBag();
+ for(HivePrivilege privilege : hivePrivileges){
+ if(privilege.getColumns() != null && privilege.getColumns().size() > 0){
+ throw new HiveAuthorizationPluginException("Privileges on columns not supported currently"
+ + "in sql standard authorization mode");
+ }
+ PrivilegeGrantInfo grantInfo = getThriftPrivilegeGrantInfo(privilege, grantorPrincipal, grantOption);
+ for(HivePrincipal principal : hivePrincipals){
+ HiveObjectPrivilege objPriv = new HiveObjectPrivilege(privObj, principal.getName(),
+ getThriftPrincipalType(principal.getType()), grantInfo);
+ privBag.addToPrivileges(objPriv);
+ }
+ }
+ return privBag;
+ }
+
+ private PrincipalType getThriftPrincipalType(HivePrincipalType type)
+ throws HiveAuthorizationPluginException {
+ try {
+ return AuthorizationUtils.getThriftPrincipalType(type);
+ } catch (HiveException e) {
+ throw new HiveAuthorizationPluginException(e);
+ }
+ }
+
+ private PrivilegeGrantInfo getThriftPrivilegeGrantInfo(HivePrivilege privilege,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthorizationPluginException {
+ try {
+ return AuthorizationUtils.getThriftPrivilegeGrantInfo(privilege, grantorPrincipal, grantOption);
+ } catch (HiveException e) {
+ throw new HiveAuthorizationPluginException(e);
+ }
+ }
+
+ /**
+ * Create a thrift object reference from the plugin interface privilege object
+ * @param privObj
+ * @return
+ * @throws HiveAuthorizationPluginException
+ */
+ private HiveObjectRef getThriftHiveObjectRef(HivePrivilegeObject privObj)
+ throws HiveAuthorizationPluginException {
+ try {
+ return AuthorizationUtils.getThriftHiveObjectRef(privObj);
+ } catch (HiveException e) {
+ throw new HiveAuthorizationPluginException(e);
+ }
+ }
+
+ @Override
+ public void revokePrivileges(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthorizationPluginException {
+
+ PrivilegeBag privBag =
+ getThriftPrivilegesBag(hivePrincipals, hivePrivileges, hivePrivObject, grantorPrincipal,
+ grantOption);
+ try {
+ metastoreClientFactory.getHiveMetastoreClient().revoke_privileges(privBag);
+ } catch (Exception e) {
+ throw new HiveAuthorizationPluginException("Error revoking privileges", e);
+ }
+ }
+
+ @Override
+ public void createRole(String roleName, HivePrincipal adminGrantor)
+ throws HiveAuthorizationPluginException {
+ try {
+ String grantorName = adminGrantor == null ? null : adminGrantor.getName();
+ metastoreClientFactory.getHiveMetastoreClient()
+ .create_role(new Role(roleName, 0, grantorName));
+ } catch (Exception e) {
+ throw new HiveAuthorizationPluginException("Error create role", e);
+ }
+ }
+
+ @Override
+ public void dropRole(String roleName) throws HiveAuthorizationPluginException {
+ try {
+ metastoreClientFactory.getHiveMetastoreClient().drop_role(roleName);
+ } catch (Exception e) {
+ throw new HiveAuthorizationPluginException("Error dropping role", e);
+ }
+ }
+
+ @Override
+ public List<String> getRoles(HivePrincipal hivePrincipal) throws HiveAuthorizationPluginException {
+ try {
+ List<Role> roles = metastoreClientFactory.getHiveMetastoreClient().list_roles(
+ hivePrincipal.getName(), getThriftPrincipalType(hivePrincipal.getType()));
+ List<String> roleNames = new ArrayList<String>(roles.size());
+ for(Role role : roles){
+ roleNames.add(role.getRoleName());
+ }
+ return roleNames;
+ } catch (Exception e) {
+ throw new HiveAuthorizationPluginException(
+ "Error listing roles for user" + hivePrincipal.getName(), e);
+ }
+ }
+
+ @Override
+ public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roleNames,
+ boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthorizationPluginException {
+ for(HivePrincipal hivePrincipal : hivePrincipals){
+ for(String roleName : roleNames){
+ try {
+ IMetaStoreClient mClient = metastoreClientFactory.getHiveMetastoreClient();
+ mClient.grant_role(roleName,
+ hivePrincipal.getName(),
+ getThriftPrincipalType(hivePrincipal.getType()),
+ grantorPrinc.getName(),
+ getThriftPrincipalType(grantorPrinc.getType()),
+ grantOption
+ );
+ } catch (Exception e) {
+ String msg = "Error granting roles for " + hivePrincipal.getName() + " to role " + roleName
+ + hivePrincipal.getName();
+ throw new HiveAuthorizationPluginException(msg, e);
+ }
+ }
+ }
+ }
+
+ @Override
+ public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roleNames,
+ boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthorizationPluginException {
+ if(grantOption){
+ //removing grant privileges only is not supported in metastore api
+ throw new HiveAuthorizationPluginException("Revoking only the admin privileges on "
+ + "role is not currently supported");
+ }
+ for(HivePrincipal hivePrincipal : hivePrincipals){
+ for(String roleName : roleNames){
+ try {
+ IMetaStoreClient mClient = metastoreClientFactory.getHiveMetastoreClient();
+ mClient.revoke_role(roleName,
+ hivePrincipal.getName(),
+ getThriftPrincipalType(hivePrincipal.getType())
+ );
+ } catch (Exception e) {
+ String msg = "Error revoking roles for " + hivePrincipal.getName() + " to role " + roleName
+ + hivePrincipal.getName();
+ throw new HiveAuthorizationPluginException(msg, e);
+ }
+ }
+ }
+ }
+
+ @Override
+ public List<String> getAllRoles() throws HiveAuthorizationPluginException {
+ try {
+ return metastoreClientFactory.getHiveMetastoreClient().listRoleNames();
+ } catch (Exception e) {
+ throw new HiveAuthorizationPluginException("Error listing all roles", e);
+ }
+ }
+
+
+ @Override
+ public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
+ throws HiveAuthorizationPluginException {
+ try {
+
+ List<HivePrivilegeInfo> resPrivInfos = new ArrayList<HivePrivilegeInfo>();
+ IMetaStoreClient mClient = metastoreClientFactory.getHiveMetastoreClient();
+
+ //get metastore/thrift privilege object using metastore api
+ List<HiveObjectPrivilege> msObjPrivs
+ = mClient.list_privileges(principal.getName(), getThriftPrincipalType(principal.getType()),
+ getThriftHiveObjectRef(privObj));
+
+ //convert the metastore thrift objects to result objects
+ for(HiveObjectPrivilege msObjPriv : msObjPrivs){
+ //result principal
+ HivePrincipal resPrincipal =
+ new HivePrincipal(msObjPriv.getPrincipalName(),
+ AuthorizationUtils.getHivePrincipalType(msObjPriv.getPrincipalType()));
+
+ //result privilege
+ PrivilegeGrantInfo msGrantInfo = msObjPriv.getGrantInfo();
+ HivePrivilege resPrivilege = new HivePrivilege(msGrantInfo.getPrivilege(), null);
+
+ //result object
+ HiveObjectRef msObjRef = msObjPriv.getHiveObject();
+ HivePrivilegeObject resPrivObj = new HivePrivilegeObject(
+ getPluginObjType(msObjRef.getObjectType()),
+ msObjRef.getDbName(),
+ msObjRef.getObjectName()
+ );
+
+ //result grantor principal
+ HivePrincipal grantorPrincipal =
+ new HivePrincipal(msGrantInfo.getGrantor(),
+ AuthorizationUtils.getHivePrincipalType(msGrantInfo.getGrantorType()));
+
+
+ HivePrivilegeInfo resPrivInfo = new HivePrivilegeInfo(resPrincipal, resPrivilege,
+ resPrivObj, grantorPrincipal, msGrantInfo.isGrantOption());
+ resPrivInfos.add(resPrivInfo);
+ }
+ return resPrivInfos;
+
+ }
+ catch (Exception e) {
+ throw new HiveAuthorizationPluginException("Error showing privileges", e);
+ }
+
+ }
+
+
+ private HivePrivilegeObjectType getPluginObjType(HiveObjectType objectType)
+ throws HiveAuthorizationPluginException {
+ switch(objectType){
+ case DATABASE:
+ return HivePrivilegeObjectType.DATABASE;
+ case TABLE:
+ return HivePrivilegeObjectType.TABLE;
+ default:
+ throw new HiveAuthorizationPluginException("Unsupported object type " + objectType);
+ }
+ }
+
+}
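
A grant-flow sketch for the access controller above (illustration only; the authorizer is obtained through the factory since the controller constructor is package-private, and this assumes HiveAuthorizer exposes the same grantPrivileges call that HiveAuthorizerImpl delegates to the access controller; all names are hypothetical):

    HiveAuthorizer authorizer = new SQLStdHiveAuthorizerFactory()
        .createHiveAuthorizer(metastoreClientFactory, conf, "admin"); // caller-supplied
    List<HivePrincipal> grantees = Arrays.asList(
        new HivePrincipal("user1", HivePrincipalType.USER));
    List<HivePrivilege> privs = Arrays.asList(new HivePrivilege("SELECT", null));
    HivePrivilegeObject table = new HivePrivilegeObject(
        HivePrivilegeObjectType.TABLE, "mydb", "mytable");
    // grantOption=false: grantees may not re-grant the privilege
    authorizer.grantPrivileges(grantees, privs, table,
        new HivePrincipal("admin", HivePrincipalType.USER), false);
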
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (revision 0)
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
+
+import java.util.List;
+
+import org.apache.hive.exception.HiveAuthorizationPluginException;
+import org.apache.hive.security.HiveAuthorizationValidator;
+import org.apache.hive.security.HiveOperationType;
+import org.apache.hive.security.HivePrivilegeObject;
+
+public class SQLStdHiveAuthorizationValidator implements HiveAuthorizationValidator {
+
+ @Override
+ public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
+ List<HivePrivilegeObject> outputHObjs) throws HiveAuthorizationPluginException {
+ }
+
+}
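
checkPrivileges above is currently a no-op placeholder; the actual privilege checks are to be added separately. A sketch only of the shape such a check could take, with hypothetical helpers requiredPrivs and userHasPrivileges:

    // for (HivePrivilegeObject obj : inputHObjs) {
    //   if (!userHasPrivileges(currentUser, requiredPrivs(hiveOpType), obj)) {
    //     throw new HiveAuthorizationPluginException(
    //         "Permission denied for " + hiveOpType + " on " + obj);
    //   }
    // }
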
Index: ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java (working copy)
@@ -21,7 +21,7 @@
import java.util.List;
import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
/**
* HiveAuthenticationProvider is an interface for authentication. The
Index: ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java (working copy)
@@ -22,9 +22,9 @@
import java.util.List;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.exception.HiveException;
public class HadoopDefaultAuthenticator implements HiveAuthenticationProvider {
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy)
@@ -55,7 +55,6 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
import org.apache.hadoop.hive.ql.hooks.Entity;
-import org.apache.hadoop.hive.ql.hooks.Entity.Type;
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.Hook;
import org.apache.hadoop.hive.ql.hooks.HookContext;
@@ -76,14 +75,12 @@
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.formatting.JsonMetaDataFormatter;
import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
@@ -102,9 +99,7 @@
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.serde2.ByteStream;
@@ -113,6 +108,12 @@
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.ErrorMsg;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
+import org.apache.hive.security.HiveOperationType;
+import org.apache.hive.security.HivePrivilegeObject;
+import org.apache.hive.security.HivePrivilegeObject.HivePrivilegeObjectType;
public class Driver implements CommandProcessor {
@@ -704,7 +705,7 @@
}
private void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
- HashSet<WriteEntity> outputs) {
+ HashSet<WriteEntity> outputs) throws HiveException {
HiveOperationType hiveOpType = getHiveOperationType(op);
List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputs);
List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputs);
@@ -712,36 +713,26 @@
return;
}
- private List<HivePrivilegeObject> getHivePrivObjects(HashSet<? extends Entity> inputs) {
+ private List<HivePrivilegeObject> getHivePrivObjects(HashSet<? extends Entity> privObjects) {
List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
- for(Entity input : inputs){
- HivePrivilegeObjectType privObjType = getHivePrivilegeObjectType(input.getType());
+ if(privObjects == null){
+ return hivePrivobjs;
+ }
+ for(Entity privObject : privObjects){
+ HivePrivilegeObjectType privObjType =
+ AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
+
//support for authorization on partitions or uri needs to be added
HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType,
- input.getDatabase().getName(),
- input.getTable().getTableName());
+ privObject.getDatabase() == null ? null : privObject.getDatabase().getName(),
+ privObject.getTable() == null ? null : privObject.getTable().getTableName());
hivePrivobjs.add(hPrivObject);
}
return hivePrivobjs;
}
- private HivePrivilegeObjectType getHivePrivilegeObjectType(Type type) {
- switch(type){
- case DATABASE:
- return HivePrivilegeObjectType.DATABASE;
- case TABLE:
- return HivePrivilegeObjectType.TABLE;
- case LOCAL_DIR:
- case DFS_DIR:
- return HivePrivilegeObjectType.URI;
- case PARTITION:
- case DUMMYPARTITION: //need to determine if a different type is needed for dummy partitions
- return HivePrivilegeObjectType.PARTITION;
- default:
- return null;
- }
- }
+
private HiveOperationType getHiveOperationType(HiveOperation op) {
return HiveOperationType.valueOf(op.name());
}
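
Behavior sketch for the refactored getHivePrivObjects (hypothetical call): a null entity set now yields an empty list instead of a NullPointerException, and the database/table lookups are null-guarded.

    List<HivePrivilegeObject> objs = getHivePrivObjects(null);
    assert objs.isEmpty();
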
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAbs.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAbs.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAbs.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncAbsDoubleToDouble;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncAbsLongToLong;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -37,6 +36,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFAbs.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java (working copy)
@@ -22,13 +22,13 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFIndex.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStringToMap.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStringToMap.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStringToMap.java (working copy)
@@ -22,7 +22,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -35,6 +34,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFStringToMap.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -35,6 +34,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser.Feature;
import org.codehaus.jackson.map.ObjectMapper;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java (working copy)
@@ -26,7 +26,6 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringConcatColCol;
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringConcatColScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringConcatScalarCol;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -37,6 +36,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.BytesWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFConcat.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java (working copy)
@@ -24,10 +24,10 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNull;
import org.apache.hadoop.hive.ql.exec.vector.expressions.SelectColumnIsNull;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFOPNull.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPOr.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPOr.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPOr.java (working copy)
@@ -26,11 +26,11 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterColOrScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterExprOrExpr;
import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterScalarOrColumn;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for computing or.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java (working copy)
@@ -28,7 +28,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput;
import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectOutput;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -41,6 +40,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java (working copy)
@@ -23,10 +23,10 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
@Description(name = "struct",
value = "_FUNC_(col1, col2, col3, ...) - Creates a struct with the given field values")
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -29,6 +28,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.exception.HiveException;
@Description(name = "max", value = "_FUNC_(expr) - Returns the maximum value of expr")
public class GenericUDAFMax extends AbstractGenericUDAFResolver {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java (working copy)
@@ -50,8 +50,8 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColNotEqualStringColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColNotEqualStringScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarNotEqualStringColumn;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for operation Not EQUAL.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java (working copy)
@@ -19,11 +19,11 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hive.exception.HiveException;
/**
* Compute the standard deviation by extending GenericUDAFVariance and
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ConversionHelper;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFBridge encapsulates UDF to provide the same interface as
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTranslate.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTranslate.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTranslate.java (working copy)
@@ -28,7 +28,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -37,6 +36,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* TRANSLATE(string input, string from, string to) is an equivalent function to translate in
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNvl.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNvl.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNvl.java (working copy)
@@ -21,8 +21,8 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
@Description(name = "nvl",
value = "_FUNC_(value,default_value) - Returns default value if value is null else returns value",
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java (working copy)
@@ -24,9 +24,10 @@
import java.util.Collections;
import java.util.Iterator;
import java.util.Comparator;
+
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java (working copy)
@@ -28,7 +28,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -38,6 +37,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
@Description(name = "ewah_bitmap_empty", value = "_FUNC_(bitmap) - "
+ "Predicate that tests whether an EWAH-compressed bitmap is all zeros ")
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPAnd.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPAnd.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPAnd.java (working copy)
@@ -26,11 +26,11 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterColAndScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterExprAndExpr;
import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterScalarAndColumn;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for computing and.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFInline.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFInline.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFInline.java (working copy)
@@ -22,11 +22,11 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
@Description(name ="inline", value= "_FUNC_( ARRAY( STRUCT()[,STRUCT()] "
+ "- explodes and array and struct into a table")
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java (working copy)
@@ -50,8 +50,8 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColEqualStringColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColEqualStringScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarEqualStringColumn;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for operation EQUAL.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFReflect.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFReflect.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFReflect.java (working copy)
@@ -22,11 +22,11 @@
import java.lang.reflect.Modifier;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
+import org.apache.hive.exception.HiveException;
/**
* common class for reflective UDFs
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java (working copy)
@@ -20,8 +20,8 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for SQL construct "COALESCE(a, b, c)".
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (working copy)
@@ -24,11 +24,11 @@
import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* A Generic User-defined function (GenericUDF) for the use with Hive.
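For context on what this repackaging means for UDF authors: under this patch, GenericUDF.evaluate(DeferredObject[]) keeps its signature but now throws org.apache.hive.exception.HiveException rather than org.apache.hadoop.hive.ql.metadata.HiveException. Below is a minimal sketch of a third-party UDF compiled against the patched tree; the package and class names are hypothetical and not part of this patch.

package org.example.hive.udf; // hypothetical package, for illustration only

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.Text;
import org.apache.hive.exception.HiveException; // the import relocated by this patch

public class GenericUDFIdentity extends GenericUDF {

  private StringObjectInspector inputOI;

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments)
      throws UDFArgumentException {
    if (arguments.length != 1) {
      throw new UDFArgumentLengthException("identity() takes exactly one argument");
    }
    // A production UDF would check the argument's category/type before casting.
    inputOI = (StringObjectInspector) arguments[0];
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // evaluate() still throws HiveException; only the exception's package changed.
    Text value = inputOI.getPrimitiveWritableObject(arguments[0].get());
    return value;
  }

  @Override
  public String getDisplayString(String[] children) {
    return "identity(" + children[0] + ")";
  }
}

Relative to a pre-patch UDF, the only source change is the import line; method bodies and signatures are otherwise untouched, which is exactly the pattern every hunk in this part of the patch follows.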
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFField.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFField.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFField.java (working copy)
@@ -22,12 +22,12 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFField.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java (working copy)
@@ -26,12 +26,12 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.TaskExecutionException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDTFExplode.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -37,6 +36,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* Computes an approximate percentile (quantile) from an approximate histogram, for very
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -39,6 +38,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
/**
* UDFDateSub.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnion.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnion.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnion.java (working copy)
@@ -23,11 +23,11 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
+import org.apache.hive.exception.HiveException;
@Description(name = "create_union", value = "_FUNC_(tag, obj1, obj2, obj3, ...)"
+ " - Creates a union with the object for given tag",
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
@WindowFunctionDescription
(
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java (working copy)
@@ -19,10 +19,10 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for SQL construct
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java (working copy)
@@ -50,9 +50,9 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColGreaterStringColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColGreaterStringScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarGreaterStringColumn;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for operation GreaterThan.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java (working copy)
@@ -22,10 +22,10 @@
import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* A Generic User-defined Table Generating Function (UDTF)
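The same single-import migration applies to table-generating functions: GenericUDTF.process(), forward(), and close() all declare the relocated HiveException. A minimal sketch under the same assumptions (the class name is hypothetical, not part of this patch):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hive.exception.HiveException; // relocated exception

// Emits each input row back out as a single-column row.
public class GenericUDTFEcho extends GenericUDTF {

  @Override
  public StructObjectInspector initialize(ObjectInspector[] argOIs)
      throws UDFArgumentException {
    List<String> fieldNames = new ArrayList<String>();
    List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("col");
    fieldOIs.add(argOIs[0]);
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
  }

  @Override
  public void process(Object[] args) throws HiveException {
    // forward() also declares the relocated HiveException.
    forward(args);
  }

  @Override
  public void close() throws HiveException {
    // nothing buffered, nothing to flush
  }
}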
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLead.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLead.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLead.java (working copy)
@@ -1,7 +1,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hive.exception.HiveException;
@UDFType(impliesOrder = true)
public class GenericUDFLead extends GenericUDFLeadLag {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFPosExplode.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFPosExplode.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFPosExplode.java (working copy)
@@ -23,13 +23,13 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
* PosExplode.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java (working copy)
@@ -29,7 +29,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput;
import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectOutput;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -38,6 +37,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* An abstract class for a UDF that performs a binary operation between two EWAH-compressed bitmaps.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java (working copy)
@@ -28,7 +28,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -40,6 +39,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
/**
* UDFDateDiff.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java (working copy)
@@ -22,7 +22,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -30,6 +29,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFSize.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java (working copy)
@@ -20,7 +20,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -28,6 +27,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFToDate
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.SettableUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -32,6 +31,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveCharObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hive.exception.HiveException;
@Description(name = "char",
value = "CAST( as CHAR(length)) - Converts the argument to a char value.",
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hive.exception.HiveException;
/**
* Computes an approximate histogram of a numerical column using a user-specified number of bins.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -37,6 +36,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* Compute the Pearson correlation coefficient corr(x, y), using the following
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDecode.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDecode.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDecode.java (working copy)
@@ -29,7 +29,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -40,6 +39,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
@Description(name = "decode",
value = "_FUNC_(bin, str) - Decode the first argument using the second argument character set",
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -32,6 +31,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* A simple generic udf to call java static functions via reflection.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRound.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRound.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRound.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.RoundWithNumDigitsDoubleToDouble;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRoundDoubleToDouble;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -46,6 +45,7 @@
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* Note: rounding function permits rounding off integer digits in decimal numbers, which essentially
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLower.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLower.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLower.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringLower;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.HiveException;
/**
* UDFLower.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -31,6 +30,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
/**
* abstract class for Lead & lag UDAFs GenericUDAFLeadLag.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUpper.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUpper.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUpper.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringUpper;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -34,6 +33,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hive.exception.HiveException;
/**
* UDFUpper.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -48,6 +47,7 @@
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Base Class for operations.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* Estimates the top-k contextual n-grams in arbitrary sequential data using a heuristic.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java (working copy)
@@ -22,7 +22,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -45,6 +44,7 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar;
+import org.apache.hive.exception.HiveException;
/**
* IF(expr1,expr2,expr3)
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNot.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNot.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNot.java (working copy)
@@ -24,11 +24,11 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.NotCol;
import org.apache.hadoop.hive.ql.exec.vector.expressions.SelectColumnIsFalse;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFOPNot.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.HiveException;
@WindowFunctionDescription(description = @Description(name = "last_value", value = "_FUNC_(x)"), supportsWindow = true, pivotResult = false, impliesOrder = true)
public class GenericUDAFLastValue extends AbstractGenericUDAFResolver
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPositive.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPositive.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPositive.java (working copy)
@@ -19,7 +19,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
@Description(name = "+", value = "_FUNC_ a - Returns a")
public class GenericUDFOPPositive extends GenericUDFBaseUnary {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLag.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLag.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLag.java (working copy)
@@ -1,7 +1,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hive.exception.HiveException;
@UDFType(impliesOrder = true)
public class GenericUDFLag extends GenericUDFLeadLag {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java (working copy)
@@ -22,9 +22,9 @@
import java.io.IOException;
import org.apache.hadoop.hive.ql.exec.MapredContext;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* A Generic User-defined aggregation function (GenericUDAF) for the use with
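And for aggregate functions: every GenericUDAFEvaluator lifecycle method (init, getNewAggregationBuffer, reset, iterate, terminatePartial, merge, terminate) declares HiveException, so existing evaluators recompile against the new package with just the import change. A compact count-style evaluator as an illustration (hypothetical class, not part of this patch):

import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;
import org.apache.hive.exception.HiveException; // relocated exception

public class CountEvaluator extends GenericUDAFEvaluator {

  static class CountBuffer implements AggregationBuffer {
    long n;
  }

  @Override
  public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
    super.init(m, parameters);
    // Both the partial and final results are longs in this sketch.
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  }

  @Override
  public AggregationBuffer getNewAggregationBuffer() throws HiveException {
    return new CountBuffer();
  }

  @Override
  public void reset(AggregationBuffer agg) throws HiveException {
    ((CountBuffer) agg).n = 0;
  }

  @Override
  public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
    if (parameters[0] != null) {
      ((CountBuffer) agg).n++;
    }
  }

  @Override
  public Object terminatePartial(AggregationBuffer agg) throws HiveException {
    return terminate(agg);
  }

  @Override
  public void merge(AggregationBuffer agg, Object partial) throws HiveException {
    // The partial is a LongWritable here because terminatePartial() returns one;
    // a general-purpose evaluator would read it through an ObjectInspector instead.
    if (partial != null) {
      ((CountBuffer) agg).n += ((LongWritable) partial).get();
    }
  }

  @Override
  public Object terminate(AggregationBuffer agg) throws HiveException {
    return new LongWritable(((CountBuffer) agg).n);
  }
}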
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIn.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIn.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIn.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -34,6 +33,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFIn
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInFile.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInFile.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInFile.java (working copy)
@@ -30,11 +30,11 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* IN_FILE(str, filename) returns true if 'str' appears in the file specified
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java (working copy)
@@ -24,12 +24,12 @@
import java.util.HashSet;
import java.util.List;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
+import org.apache.hive.exception.HiveException;
public class GenericUDAFMkCollectionEvaluator extends GenericUDAFEvaluator
implements Serializable {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLocate.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLocate.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLocate.java (working copy)
@@ -22,13 +22,13 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* Generic UDF for string function LOCATE(substr, str),
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloorCeilBase.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloorCeilBase.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloorCeilBase.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -34,6 +33,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
public abstract class GenericUDFFloorCeilBase extends GenericUDF {
private final String opName;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -39,6 +38,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* Compute the covariance covar_pop(x, y), using the following one-pass method
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
@WindowFunctionDescription
(
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java (working copy)
@@ -28,7 +28,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDTFParseUrlTuple: this
*
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java (working copy)
@@ -19,11 +19,11 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hive.exception.HiveException;
/**
* Compute the sample standard deviation by extending GenericUDAFVariance and
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMapValues.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMapValues.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMapValues.java (working copy)
@@ -24,11 +24,11 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFMapValues.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColUnaryMinus;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -31,6 +30,7 @@
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
@Description(name = "-", value = "_FUNC_ a - Returns -a")
@VectorizedExpressions({LongColUnaryMinus.class, DoubleColUnaryMinus.class})
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java (working copy)
@@ -22,11 +22,11 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToTimestampViaDoubleToLong;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToTimestampViaLongToLong;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
*
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java (working copy)
@@ -24,7 +24,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
@WindowFunctionDescription
(
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java (working copy)
@@ -22,7 +22,6 @@
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
@@ -38,6 +37,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDAFSum.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java (working copy)
@@ -24,7 +24,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
@WindowFunctionDescription
(
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java (working copy)
@@ -20,9 +20,9 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.exception.HiveException;
@UDFType(deterministic = false)
@Description(name = "unix_timestamp",
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMapKeys.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMapKeys.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMapKeys.java (working copy)
@@ -24,11 +24,11 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFMapKeys.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java (working copy)
@@ -29,13 +29,13 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampLong;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampString;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* deterministic version of UDFUnixTimeStamp. enforces argument
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayContains.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayContains.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayContains.java (working copy)
@@ -20,13 +20,13 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFArrayContains.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java (working copy)
@@ -19,7 +19,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.ql.exec.UDTFOperator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
/**
* UDTFCollector collects data from a GenericUDTF and passes the data to a
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.exception.HiveException;
@WindowFunctionDescription
(
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
@@ -29,6 +28,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
+import org.apache.hive.exception.HiveException;
@Description(name = "binary", value = "_FUNC_(a) - cast a to binary",
extended = "Currently only string or binary can be cast into binary")
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java (working copy)
@@ -23,13 +23,13 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
/**
* Takes a row of size k of data and splits it into n rows of data. For
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java (working copy)
@@ -25,11 +25,11 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
@Description(name = "named_struct",
value = "_FUNC_(name1, val1, name2, val2, ...) - Creates a struct with the given " +
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java (working copy)
@@ -19,11 +19,11 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hive.exception.HiveException;
/**
* Compute the sample variance by extending GenericUDAFVariance and overriding
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -46,6 +45,7 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDAFComputeStats
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBridge.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBridge.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBridge.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDAF;
import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ConversionHelper;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.exception.HiveException;
/**
* This class is a bridge between GenericUDAF and UDAF. Old UDAF can be used
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* Generic UDF for format_number function
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -32,6 +31,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
public class GenericUDFFromUtcTimestamp extends GenericUDF {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java (working copy)
@@ -20,12 +20,12 @@
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
public abstract class GenericUDFBaseTrim extends GenericUDF {
private transient TextConverter converter;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -37,6 +36,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
@WindowFunctionDescription
(
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* UDFDate.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java (working copy)
@@ -21,9 +21,9 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for operation LessThan.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFElt.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFElt.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFElt.java (working copy)
@@ -22,12 +22,12 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
/**
* Generic UDF for string function ELT(N,str1,str2,str3,...). This
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSortArray.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSortArray.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSortArray.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* Generic UDF for array sort
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLeadLag.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLeadLag.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLeadLag.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.hadoop.hive.ql.exec.PTFUtils;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -32,6 +31,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
public abstract class GenericUDFLeadLag extends GenericUDF {
transient ExprNodeEvaluator exprEvaluator;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -40,6 +39,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
* Compute the variance. This class is extended by: GenericUDAFVarianceSample
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/Collector.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/Collector.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/Collector.java (working copy)
@@ -18,7 +18,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
/**
* Collector gets data from a source.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java (working copy)
@@ -50,9 +50,9 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColGreaterEqualStringColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColGreaterEqualStringScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarGreaterEqualStringColumn;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for operation EqualOrGreaterThan.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEncode.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEncode.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEncode.java (working copy)
@@ -29,7 +29,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -42,6 +41,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
@Description(name = "encode",
value = "_FUNC_(str, str) - Encode the first argument using the second argument character set",
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInstr.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInstr.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInstr.java (working copy)
@@ -22,13 +22,13 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* Generic UDF for string function INSTR(str,substr). This mimics
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMacro.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMacro.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMacro.java (working copy)
@@ -26,13 +26,13 @@
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFMacro wraps a user-defined macro expression into a GenericUDF
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java (working copy)
@@ -50,9 +50,9 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColLessEqualStringColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColLessEqualStringScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarLessEqualStringColumn;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for operation EqualOrLessThan.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
@@ -46,6 +45,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDAFAverage.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java (working copy)
@@ -19,11 +19,11 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hive.exception.HiveException;
/**
* Compute the sample covariance by extending GenericUDAFCovariance and overriding
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java (working copy)
@@ -24,12 +24,12 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
@WindowFunctionDescription
(
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java (working copy)
@@ -22,7 +22,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -34,6 +33,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* Generic UDF for string function
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java (working copy)
@@ -22,7 +22,6 @@
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -39,6 +38,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Base Class for operations.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFHash.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFHash.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFHash.java (working copy)
@@ -21,11 +21,11 @@
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for computing hash values.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -29,6 +28,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.exception.HiveException;
@Description(name = "min", value = "_FUNC_(expr) - Returns the minimum value of expr")
public class GenericUDAFMin extends AbstractGenericUDAFResolver {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -39,6 +38,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
/**
* UDFDateAdd.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSentences.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSentences.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSentences.java (working copy)
@@ -25,12 +25,12 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFSentences: splits a natural language chunk of text into sentences and words.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -29,6 +28,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.exception.HiveException;
/**
* This class implements the COUNT aggregation function as in SQL.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java (working copy)
@@ -21,11 +21,11 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hive.exception.HiveException;
@Description(name = "between", value = "_FUNC_ a [NOT] BETWEEN b AND c - evaluate if a is [not] in between b and c")
public class GenericUDFBetween extends GenericUDF {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java (working copy)
@@ -24,10 +24,10 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNotNull;
import org.apache.hadoop.hive.ql.exec.vector.expressions.SelectColumnIsNotNull;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFOPNotNull.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java (working copy)
@@ -23,12 +23,12 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFSplit.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPower.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPower.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPower.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncPowerDoubleToDouble;
import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncPowerLongToDouble;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
@Description(name = "power,pow",
value = "_FUNC_(x1, x2) - raise x1 to the power of x2",
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java (working copy)
@@ -21,7 +21,6 @@
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -30,6 +29,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
public abstract class GenericUDFBasePad extends GenericUDF {
private transient Converter converter1;
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDecimal.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDecimal.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDecimal.java (working copy)
@@ -20,7 +20,6 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.SettableUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -29,6 +28,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hive.exception.HiveException;
@Description(name = "decimal", value = "_FUNC_(a) - cast a to decimal")
public class GenericUDFToDecimal extends GenericUDF implements SettableUDF {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAssertTrue.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAssertTrue.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAssertTrue.java (working copy)
@@ -22,16 +22,14 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-
import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFAssertTrue
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java (working copy)
@@ -26,7 +26,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -42,6 +41,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* Generic UDF for printf function
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java (working copy)
@@ -27,7 +27,6 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -48,6 +47,7 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
+import org.apache.hive.exception.HiveException;
/**
* A simple generic udf to call java functions via reflection.
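For context on the file above: reflect2 resolves a method by name on the value's own class and invokes it per row. The core of that technique, stripped of Hive plumbing (a sketch, not the UDF's actual code):

import java.lang.reflect.Method;

public class ReflectCallSketch {
  // Look the method up by name on the target's class, then invoke it --
  // the same java.lang.reflect calls the UDF ultimately relies on.
  public static Object call(Object target, String methodName) throws Exception {
    Method m = target.getClass().getMethod(methodName);
    return m.invoke(target);
  }

  public static void main(String[] args) throws Exception {
    System.out.println(call("hello", "toUpperCase"));  // prints HELLO
  }
}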
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualNS.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualNS.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualNS.java (working copy)
@@ -19,7 +19,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
@Description(name = "<=>", value = "a _FUNC_ b - Returns same result with EQUAL(=) operator " +
"for non-null operands, but returns TRUE if both are NULL, FALSE if one of the them is NULL")
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java (working copy)
@@ -19,10 +19,10 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDF Class for SQL construct "CASE a WHEN b THEN c [ELSE f] END".
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java (working copy)
@@ -24,13 +24,13 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFMap.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java (working copy)
@@ -25,12 +25,12 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
+import org.apache.hive.exception.HiveException;
@WindowFunctionDescription
(
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java (working copy)
@@ -23,13 +23,13 @@
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.exception.HiveException;
/**
* GenericUDFArray.
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -36,6 +35,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
/**
* Estimates the top-k n-grams in arbitrary sequential data using a heuristic.
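As a reference point for the javadoc above: the heart of n-gram estimation is sliding a window of size n over a token sequence and counting occurrences; the UDAF's contribution is the heuristic that prunes the counting map to bound memory, which this sketch omits.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class NGramSketch {
  // Counts every n-gram exactly; the real UDAF trims low-frequency
  // entries periodically so the map cannot grow without bound.
  public static Map<List<String>, Integer> ngrams(List<String> tokens, int n) {
    Map<List<String>, Integer> counts = new HashMap<List<String>, Integer>();
    for (int i = 0; i + n <= tokens.size(); i++) {
      // Copy the window: subList is a view, not a stable map key.
      List<String> gram = new ArrayList<String>(tokens.subList(i, i + n));
      Integer c = counts.get(gram);
      counts.put(gram, c == null ? 1 : c + 1);
    }
    return counts;
  }
}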
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.SettableUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -32,6 +31,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hive.exception.HiveException;
@Description(name = "varchar",
value = "CAST( as VARCHAR(length)) - Converts the argument to a varchar value.",
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java (working copy)
@@ -23,13 +23,13 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.PTFDesc;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
* Based on Hive {@link GenericUDAFResolver}. Break up the responsibility of the
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java (working copy)
@@ -22,11 +22,11 @@
import org.apache.hadoop.hive.ql.exec.PTFPartition;
import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
import org.apache.hadoop.hive.ql.exec.PTFUtils;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.PTFDesc;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
/**
 * Based on Hive {@link GenericUDAFEvaluator}. Break up the responsibility of the old AbstractTableFunction
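The javadoc fragments above describe the design these PTF files share: the old AbstractTableFunction was split into a compile-time Resolver and a run-time Evaluator. The schematic below only makes that division of labor concrete; the method shapes are simplified placeholders, not the real TableFunctionResolver/TableFunctionEvaluator signatures, and only the relocated exception import is taken from the patch.

import org.apache.hive.exception.HiveException;

// Compile-time half: validates the PTF invocation and fixes the output
// schema, playing the role GenericUDAFResolver plays for aggregates.
abstract class ResolverSketch {
  abstract void setupOutputOI() throws HiveException;  // placeholder shape
  abstract EvaluatorSketch createEvaluator();          // placeholder shape
}

// Run-time half: consumes an input partition and produces the output
// partition, playing the role GenericUDAFEvaluator plays for aggregates.
abstract class EvaluatorSketch {
  abstract Object execute(Object inputPartition) throws HiveException;  // placeholder shape
}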
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.hadoop.hive.ql.exec.PTFOperator;
import org.apache.hadoop.hive.ql.exec.PTFPartition;
import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec;
@@ -45,6 +44,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hive.exception.HiveException;
@SuppressWarnings("deprecation")
public class WindowingTableFunction extends TableFunctionEvaluator {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java (working copy)
@@ -21,11 +21,11 @@
import java.util.ArrayList;
import org.apache.hadoop.hive.ql.exec.PTFPartition;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.PTFDesc;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
public class NoopWithMap extends Noop
{
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java (working copy)
@@ -28,8 +28,6 @@
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.PTFPartition;
import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.PTFTranslator;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
@@ -55,6 +53,8 @@
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.exception.HiveException;
+import org.apache.hive.parse.ASTNode;
/**
* return rows that meet a specified pattern. Use symbols to specify a list of expressions
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java (working copy)
@@ -22,11 +22,11 @@
import org.apache.hadoop.hive.ql.exec.PTFPartition;
import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.PTFDesc;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hive.exception.HiveException;
public class Noop extends TableFunctionEvaluator {
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/xml/GenericUDFXPath.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/xml/GenericUDFXPath.java (revision 1560846)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/xml/GenericUDFXPath.java (working copy)
@@ -25,7 +25,6 @@
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import org.apache.hive.exception.HiveException;
import org.w3c.dom.NodeList;
@Description(
Index: ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt
===================================================================
--- ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt (revision 1560846)
+++ ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt (working copy)
@@ -29,7 +29,7 @@
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
Index: ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt
===================================================================
--- ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt (revision 1560846)
+++ ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt (working copy)
@@ -25,7 +25,7 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.io.LongWritable;
Index: ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt
===================================================================
--- ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt (revision 1560846)
+++ ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt (working copy)
@@ -28,7 +28,7 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.io.LongWritable;
Index: ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt
===================================================================
--- ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt (revision 1560846)
+++ ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt (working copy)
@@ -27,7 +27,7 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
Index: ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
===================================================================
--- ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt (revision 1560846)
+++ ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt (working copy)
@@ -28,7 +28,7 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.exception.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.io.LongWritable;
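The five .txt hunks above touch code-generation templates rather than Java sources: the vectorization build step expands each template into a family of concrete classes, so one import swap in the template propagates to every generated class. Roughly what a generated class ends up looking like (the class below is a stub for illustration, not the real generated code):

import org.apache.hive.exception.HiveException;  // relocated; inherited from the template

public class VectorUDAFSumLongSketch {
  // The real generated class (e.g. VectorUDAFSumLong, expanded from
  // VectorUDAFSum.txt) carries this import and throws HiveException
  // from its aggregation methods; the body is stubbed here.
  public void aggregateInput() throws HiveException {
    // generated aggregation logic would go here
  }
}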
Index: ql/pom.xml
===================================================================
--- ql/pom.xml (revision 1560846)
+++ ql/pom.xml (working copy)
@@ -107,16 +107,6 @@
       <version>${log4j.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.antlr</groupId>
-      <artifactId>antlr-runtime</artifactId>
-      <version>${antlr.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.antlr</groupId>
-      <artifactId>ST4</artifactId>
-      <version>${ST4.version}</version>
-    </dependency>
-    <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
       <version>${avro.version}</version>