diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java index 6a62592..3a64a7d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java @@ -436,6 +436,15 @@ HIVE_GROUPING_SETS_AGGR_NOMAPAGGR_MULTIGBY(10315, "Grouping sets aggregations (with rollups or cubes) are not allowed when " + "HIVEMULTIGROUPBYSINGLEREDUCER is turned on. Set hive.multigroupby.singlereducer=false if you want to use grouping sets"), + CANNOT_RETRIEVE_TABLE_METADATA(10316, "Error while retrieving table metadata"), + CANNOT_DROP_INDEX(10317, "Error while dropping index"), + INVALID_AST_TREE(10318, "Internal error : Invalid AST"), + ERROR_SERIALIZE_METASTORE(10319, "Error while serializing the metastore objects"), + IO_ERROR(10320, "Error while performing IO operation "), + ERROR_SERIALIZE_METADATA(10321, "Error while serializing the metadata"), + GENERIC_SEMANTIC_ERROR(10322, "Generic Semantic error"), + INVALID_LOAD_TABLE_FILE_WORK(10323, "Invalid Load Table Work or Load File Work"), + CLASSPATH_ERROR(10324, "Classpath error"), //========================== 20000 range starts here ========================// SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."), SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. 
" diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index cead5ae..9182e3e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -779,7 +779,7 @@ public TableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartit throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(ast .getChild(0)), ite); } catch (HiveException e) { - throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg(ast + throw new SemanticException(ErrorMsg.CANNOT_RETRIEVE_TABLE_METADATA.getMsg(ast .getChild(childIndex), e.getMessage()), e); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 5e6b606..3614467 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -1181,7 +1181,7 @@ private void analyzeDropIndex(ASTNode ast) throws SemanticException { Index idx = db.getIndex(tableName, indexName); } catch (HiveException e) { if (!(e.getCause() instanceof NoSuchObjectException)) { - throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("dropping index"), e); + throw new SemanticException(ErrorMsg.CANNOT_DROP_INDEX.getMsg("dropping index"), e); } if (throwException) { throw new SemanticException(ErrorMsg.INVALID_INDEX.getMsg(indexName)); @@ -2091,7 +2091,7 @@ private void analyzeShowTables(ASTNode ast) throws SemanticException { String tableNames = null; if (ast.getChildCount() > 3) { - throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg()); + throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(ast.toStringTree())); } switch (ast.getChildCount()) { @@ -2156,7 +2156,8 @@ private void analyzeShowTableStatus(ASTNode ast) throws SemanticException { } else 
if (child.getToken().getType() == HiveParser.TOK_PARTSPEC) { partSpec = getValidatedPartSpec(getTable(tableNames), child, conf, false); } else { - throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg()); + throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(child.toStringTree() + + " , Invalid token " + child.getToken().getType())); } } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java index 179f9c2..a3fcaa0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java @@ -122,7 +122,7 @@ static URI getValidatedURI(HiveConf conf, String dcPath) throws SemanticExceptio throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e); } } catch (IOException e) { - throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg(), e); + throw new SemanticException(ErrorMsg.IO_ERROR.getMsg(), e); } } @@ -239,8 +239,8 @@ public static void createExportDump(FileSystem fs, Path metadataPath, jgen.writeEndArray(); } catch (TException e) { throw new SemanticException( - ErrorMsg.GENERIC_ERROR - .getMsg("Exception while serializing the metastore objects"), e); + ErrorMsg.ERROR_SERIALIZE_METASTORE + .getMsg(), e); } } jgen.writeEndObject(); @@ -318,9 +318,9 @@ public static ReadMetaData readMetaData(FileSystem fs, Path metadataPath) return new ReadMetaData(table, partitionsList,readReplicationSpec(jsonContainer)); } catch (JSONException e) { - throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("Error in serializing metadata"), e); + throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METADATA.getMsg(), e); } catch (TException e) { - throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("Error in serializing metadata"), e); + throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METADATA.getMsg(), e); } finally { if (mdstream != null) { mdstream.close(); diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java index ff385d0..fe8147a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java @@ -166,7 +166,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { + " and then copied to " + toURI.toString()); } catch (Exception e) { throw new SemanticException( - ErrorMsg.GENERIC_ERROR + ErrorMsg.IO_ERROR .getMsg("Exception while writing out the local file"), e); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java index 88c4b95..30a47a8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java @@ -237,7 +237,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { } catch (SemanticException e) { throw e; } catch (Exception e) { - throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg(), e); + throw new SemanticException(ErrorMsg.GENERIC_SEMANTIC_ERROR.getMsg(), e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index a8f9f50..89897d7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -108,7 +108,7 @@ public void compile(final ParseContext pCtx, final List