diff --git contrib/src/test/results/clientnegative/serde_regex.q.out contrib/src/test/results/clientnegative/serde_regex.q.out index c51567f..bbac666 100644 --- contrib/src/test/results/clientnegative/serde_regex.q.out +++ contrib/src/test/results/clientnegative/serde_regex.q.out @@ -80,5 +80,4 @@ WITH SERDEPROPERTIES ( ) STORED AS TEXTFILE PREHOOK: type: CREATETABLE -#### A masked pattern was here #### -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException org.apache.hadoop.hive.contrib.serde2.RegexSerDe only accepts string columns, but column[5] named status has type int) diff --git contrib/src/test/results/clientnegative/url_hook.q.out contrib/src/test/results/clientnegative/url_hook.q.out index a4aa64f..601fd93 100644 --- contrib/src/test/results/clientnegative/url_hook.q.out +++ contrib/src/test/results/clientnegative/url_hook.q.out @@ -5,5 +5,4 @@ POSTHOOK: type: SHOWTABLES src PREHOOK: query: SHOW TABLES 'src' PREHOOK: type: SHOWTABLES -FAILED: Error in metadata: ERROR: The database default does not exist. -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Database does not exist: default diff --git hcatalog/src/test/e2e/templeton/tests/ddl.conf hcatalog/src/test/e2e/templeton/tests/ddl.conf index 518a94c..812d93c 100644 --- hcatalog/src/test/e2e/templeton/tests/ddl.conf +++ hcatalog/src/test/e2e/templeton/tests/ddl.conf @@ -299,7 +299,7 @@ $cfg = 'method' => 'GET', 'url' => ':TEMPLETON_URL:/templeton/v1/ddl/database/templeton_testdb1/table/templeton_testtab1?user.name=:UNAME:', 'status_code' => 404, - 'json_field_substr_match' => {'error' => 'templeton_testtab1 does not exist'}, + 'json_field_substr_match' => {'error' => 'FAILED: SemanticException \[Error 10001\]: Table not found templeton_testtab1'}, }, { #-ve test , describe on non existent table @@ -307,7 +307,7 @@ $cfg = 'method' => 'GET', 'url' => ':TEMPLETON_URL:/templeton/v1/ddl/database/templeton_testdb1/table/templeton_testtab1?user.name=:UNAME:&format=extended:', 'status_code' => 404, - 'json_field_substr_match' => {'error' => 'templeton_testtab1 does not exist'}, + 'json_field_substr_match' => {'error' => 'FAILED: SemanticException \[Error 10001\]: Table not found templeton_testtab1'}, }, ] @@ -735,7 +735,7 @@ STORED AS rcfile 'status_code' => 404, 'json_field_substr_match' => { - 'error' => 'Partition.*for table templetontest_parts does not exist' + 'error' => 'FAILED: SemanticException \[Error 10006\]: Partition not found \{dt=20120101\, country=IN\}' }, }, diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java index 7bf9ede..8c143a8 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java @@ -116,6 +116,7 @@ public static final String HADOOP_SPECULATIVE_NAME = "mapred.map.tasks.speculative.execution"; public static final String HADOOP_CHILD_JAVA_OPTS = "mapred.child.java.opts"; + public static final String UNIT_TEST_MODE = "templeton.unit.test.mode"; private static final Log LOG = LogFactory.getLog(AppConfig.class); diff --git 
hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/BadParam.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/BadParam.java index d734a30..50f0b75 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/BadParam.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/BadParam.java @@ -18,11 +18,13 @@ */ package org.apache.hcatalog.templeton; +import org.eclipse.jetty.http.HttpStatus; + /** * Missing required or badly configured paramater. */ public class BadParam extends SimpleWebException { public BadParam(String msg) { - super(400, msg); + super(HttpStatus.BAD_REQUEST_400, msg); } } diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/BusyException.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/BusyException.java index c9a04ab..0ce15dd 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/BusyException.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/BusyException.java @@ -18,11 +18,13 @@ */ package org.apache.hcatalog.templeton; +import org.eclipse.jetty.http.HttpStatus; + /** * Simple "we are busy, try again" exception. */ public class BusyException extends SimpleWebException { public BusyException() { - super(503, "Busy, please retry"); + super(HttpStatus.SERVICE_UNAVAILABLE_503, "Busy, please retry"); } } diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CallbackFailedException.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CallbackFailedException.java index fb6a422..7c6f10d 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CallbackFailedException.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CallbackFailedException.java @@ -18,11 +18,13 @@ */ package org.apache.hcatalog.templeton; +import org.eclipse.jetty.http.HttpStatus; + /** * The callback failed when it tried to reach the callback URL. */ public class CallbackFailedException extends SimpleWebException { public CallbackFailedException(String msg) { - super(400, msg); + super(HttpStatus.BAD_REQUEST_400, msg); } } diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CatchallExceptionMapper.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CatchallExceptionMapper.java index 3ff570f..49c603f 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CatchallExceptionMapper.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CatchallExceptionMapper.java @@ -24,6 +24,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.eclipse.jetty.http.HttpStatus; /** * Map all exceptions to the Jersey response. 
This lets us have nice @@ -36,6 +37,6 @@ public Response toResponse(Exception e) { LOG.error(e.getMessage(), e); - return SimpleWebException.buildMessage(500, null, e.getMessage()); + return SimpleWebException.buildMessage(HttpStatus.INTERNAL_SERVER_ERROR_500, null, e.getMessage()); } } diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/HcatDelegator.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/HcatDelegator.java index 6dbf9e2..a364992 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/HcatDelegator.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/HcatDelegator.java @@ -33,7 +33,9 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hcatalog.templeton.tool.TempletonUtils; +import org.eclipse.jetty.http.HttpStatus; /** @@ -95,7 +97,15 @@ public ExecBean run(String user, String exec, boolean format, args.add("-D"); args.add("hive.format=json"); } - + LOG.info("Main.getAppConfigInstance().get(AppConfig.UNIT_TEST_MODE)=" + + Main.getAppConfigInstance().get(AppConfig.UNIT_TEST_MODE)); + if(System.getProperty("hive.metastore.warehouse.dir") != null) { + /*when running in unit test mode, pass this property to HCat, + which will in turn pass it to Hive to make sure that Hive + tries to write to a directory that exists.*/ + args.add("-D"); + args.add("hive.metastore.warehouse.dir=" + System.getProperty("hive.metastore.warehouse.dir")); + } return args; } @@ -113,12 +123,6 @@ public Response descDatabase(String user, String db, boolean extended) String res = jsonRun(user, exec); return JsonBuilder.create(res).build(); } catch (HcatException e) { - if (e.execBean.stderr.contains("SemanticException")) { - return JsonBuilder.create(). - put("error", "Database " + db + " does not exist") - .put("errorCode", "404") - .put("database", db).build(); - } throw new HcatException("unable to describe database: " + db, e.execBean, exec); } @@ -256,14 +260,6 @@ public Response descTable(String user, String db, String table, boolean extended .put("table", table) .build(); } catch (HcatException e) { - if (e.execBean.stderr.contains("SemanticException") && - e.execBean.stderr.contains("Table not found")) { - return JsonBuilder.create(). - put("error", "Table" + db + "." + table + " does not exist" ) - .put("errorCode", "404") - .put("table", table) - .put("database", db).build(); - } throw new HcatException("unable to describe database: " + db, e.execBean, exec); } @@ -470,8 +466,8 @@ private String singleTable(String json, String table) return JsonBuilder.mapToJson(tables.get(0)); else { return JsonBuilder - .createError(String.format("Table %s does not exist", table), - JsonBuilder.MISSING). + .createError(ErrorMsg.INVALID_TABLE.format(table), + ErrorMsg.INVALID_TABLE.getErrorCode()). 
buildJson(); } } @@ -530,7 +526,7 @@ public Response descTableProperty(String user, String db, throws HcatException, NotAuthorizedException, BusyException, ExecuteException, IOException { Response res = descTable(user, db, table, true); - if (res.getStatus() != JsonBuilder.OK) + if (res.getStatus() != HttpStatus.OK_200) return res; Map props = tableProperties(res.getEntity()); Map found = null; @@ -556,7 +552,7 @@ public Response listTableProperties(String user, String db, String table) throws HcatException, NotAuthorizedException, BusyException, ExecuteException, IOException { Response res = descTable(user, db, table, true); - if (res.getStatus() != JsonBuilder.OK) + if (res.getStatus() != HttpStatus.OK_200) return res; Map props = tableProperties(res.getEntity()); return JsonBuilder.create() @@ -645,7 +641,8 @@ public Response descOnePartition(String user, String db, String table, + table + " does not exist" + db + "." + table + " does not exist"; return JsonBuilder.create() .put("error", emsg) - .put("errorCode", "404") + //this error should really be produced by Hive (DDLTask) + .put("errorCode", ErrorMsg.INVALID_PARTITION.getErrorCode()) .put("database", db) .put("table", table) .put("partition", partition) @@ -678,7 +675,8 @@ public Response addOnePartition(String user, String db, String table, if (res.indexOf("AlreadyExistsException") > -1) { return JsonBuilder.create(). put("error", "Partition already exists") - .put("errorCode", "409") + //This error code should really be produced by Hive + .put("errorCode", ErrorMsg.PARTITION_EXISTS.getErrorCode()) .put("database", db) .put("table", table) .put("partition", desc.partition).build(); @@ -742,13 +740,13 @@ public Response descOneColumn(String user, String db, String table, String colum throws SimpleWebException, NotAuthorizedException, BusyException, ExecuteException, IOException { Response res = listColumns(user, db, table); - if (res.getStatus() != JsonBuilder.OK) + if (res.getStatus() != HttpStatus.OK_200) return res; Object o = res.getEntity(); final Map fields = (o != null && (o instanceof Map)) ? 
(Map) o : null; if (fields == null) - throw new SimpleWebException(500, "Internal error, unable to find column " + throw new SimpleWebException(HttpStatus.INTERNAL_SERVER_ERROR_500, "Internal error, unable to find column " + column); @@ -763,7 +761,7 @@ public Response descOneColumn(String user, String db, String table, String colum } } if (found == null) - throw new SimpleWebException(500, "unable to find column " + column, + throw new SimpleWebException(HttpStatus.INTERNAL_SERVER_ERROR_500, "unable to find column " + column, new HashMap<String, Object>() { { put("description", fields); diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/HcatException.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/HcatException.java index 388578a..dae71f7 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/HcatException.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/HcatException.java @@ -18,6 +18,8 @@ */ package org.apache.hcatalog.templeton; +import org.eclipse.jetty.http.HttpStatus; + import java.util.HashMap; /** @@ -28,7 +30,7 @@ public String statement; public HcatException(String msg, final ExecBean bean, final String statement) { - super(500, msg, new HashMap<String, Object>() { + super(HttpStatus.INTERNAL_SERVER_ERROR_500, msg, new HashMap<String, Object>() { { put("exec", bean); put("statement", statement); diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/JsonBuilder.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/JsonBuilder.java index 55be07a..7d01cbb 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/JsonBuilder.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/JsonBuilder.java @@ -25,17 +25,34 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hcatalog.templeton.tool.TempletonUtils; import org.codehaus.jackson.map.ObjectMapper; +import org.eclipse.jetty.http.HttpStatus; /** * Helper class to build new json objects with new top level * properties. Only add non-null entries. */ public class JsonBuilder { - static final int OK = 200; - static final int MISSING = 404; - static final int SERVER_ERROR = 500; + private static final Map<Integer, Integer> hiveError2HttpStatusCode = new HashMap<Integer, Integer>(); + + /** + * It's expected that Hive (and thus HCat CLI) will return canonical error msgs/codes. + * Here they are mapped to appropriate HTTP Status Code. + */ + static { + hiveError2HttpStatusCode.put(ErrorMsg.GENERIC_ERROR.getErrorCode(), HttpStatus.INTERNAL_SERVER_ERROR_500); + hiveError2HttpStatusCode.put(ErrorMsg.DATABASE_NOT_EXISTS.getErrorCode(), HttpStatus.NOT_FOUND_404); + hiveError2HttpStatusCode.put(ErrorMsg.INVALID_TABLE.getErrorCode(), HttpStatus.NOT_FOUND_404); + hiveError2HttpStatusCode.put(ErrorMsg.TABLE_NOT_PARTITIONED.getErrorCode(), HttpStatus.NOT_FOUND_404); + hiveError2HttpStatusCode.put(ErrorMsg.INVALID_PARTITION.getErrorCode(), HttpStatus.NOT_FOUND_404); + + hiveError2HttpStatusCode.put(ErrorMsg.DUPLICATE_COLUMN_NAMES.getErrorCode(), HttpStatus.CONFLICT_409); + hiveError2HttpStatusCode.put(ErrorMsg.DATABASE_ALREADY_EXISTS.getErrorCode(), HttpStatus.CONFLICT_409); + hiveError2HttpStatusCode.put(ErrorMsg.PARTITION_EXISTS.getErrorCode(), HttpStatus.CONFLICT_409); + hiveError2HttpStatusCode.put(ErrorMsg.TABLE_ALREADY_EXISTS.getErrorCode(), HttpStatus.CONFLICT_409); + } // The map we're building.
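Aside: a minimal, self-contained model of the lookup that the new JsonBuilder table above performs, replacing the old practice of grepping stderr for strings such as "SemanticException". It uses only error codes that appear elsewhere in this patch (10001 INVALID_TABLE, 10006 INVALID_PARTITION, 10241 TABLE_NOT_PARTITIONED, 10242 DATABASE_ALREADY_EXISTS); the class name is hypothetical and this is a sketch, not the patch's code.

```java
import java.util.HashMap;
import java.util.Map;

public class HiveErrorToHttpStatusSketch {
  // Canonical Hive error code -> HTTP status, mirroring hiveError2HttpStatusCode above.
  private static final Map<Integer, Integer> CODES = new HashMap<Integer, Integer>();
  static {
    CODES.put(10001, 404); // INVALID_TABLE           -> Not Found
    CODES.put(10006, 404); // INVALID_PARTITION       -> Not Found
    CODES.put(10241, 404); // TABLE_NOT_PARTITIONED   -> Not Found
    CODES.put(10242, 409); // DATABASE_ALREADY_EXISTS -> Conflict
  }

  // Unmapped codes fall back to 500, matching buildResponse()'s generic default.
  static int toHttpStatus(int hiveErrorCode) {
    Integer status = CODES.get(hiveErrorCode);
    return status == null ? 500 : status;
  }

  public static void main(String[] args) {
    System.out.println(toHttpStatus(10001)); // 404
    System.out.println(toHttpStatus(99999)); // 500
  }
}
```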
private Map map; @@ -65,11 +82,11 @@ public static JsonBuilder create() /** * Create a new map error object. */ - public static JsonBuilder createError(String msg, int code) + public static JsonBuilder createError(String msg, int errorCode) throws IOException { return new JsonBuilder(null) .put("error", msg) - .put("errorCode", code); + .put("errorCode", errorCode); } /** @@ -115,16 +132,13 @@ public String buildJson() * Turn the map back to response object. */ public Response buildResponse() { - int status = OK; // Server ok. + int status = HttpStatus.OK_200; // Server ok. if (map.containsKey("error")) - status = SERVER_ERROR; // Generic http server error. + status = HttpStatus.INTERNAL_SERVER_ERROR_500; // Generic http server error. Object o = map.get("errorCode"); if (o != null) { - try { - status = Integer.parseInt(o.toString()); - } catch (Exception e) { - if (o instanceof Number) - status = ((Number) o).intValue(); + if(hiveError2HttpStatusCode.containsKey(o)) { + status = hiveError2HttpStatusCode.get(o); } } return buildResponse(status); diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/Main.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/Main.java index c074452..3761228 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/Main.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/Main.java @@ -50,6 +50,7 @@ private static final Log LOG = LogFactory.getLog(Main.class); public static final int DEFAULT_PORT = 8080; + private Server server; private static volatile AppConfig conf; @@ -62,7 +63,7 @@ public static synchronized AppConfig getAppConfigInstance() { return conf; } - public Main(String[] args) { + Main(String[] args) { init(args); } @@ -115,6 +116,17 @@ public void run() { System.exit(1); } } + void stop() { + if(server != null) { + try { + server.stop(); + } + catch(Exception ex) { + LOG.warn("Failed to stop jetty.Server", ex); + } + } + } + private void checkEnv() { checkCurrentDirPermissions(); @@ -179,6 +191,7 @@ public Server runServer(int port) // Start the server server.start(); + this.server = server; return server; } diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/NotAuthorizedException.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/NotAuthorizedException.java index 3ebae20..3245b17 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/NotAuthorizedException.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/NotAuthorizedException.java @@ -18,11 +18,13 @@ */ package org.apache.hcatalog.templeton; +import org.eclipse.jetty.http.HttpStatus; + /** * Simple "user not found" type exception. 
*/ public class NotAuthorizedException extends SimpleWebException { public NotAuthorizedException(String msg) { - super(401, msg); + super(HttpStatus.UNAUTHORIZED_401, msg); } } diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/QueueException.java hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/QueueException.java index a7f35f9..8fd09e1 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/QueueException.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/QueueException.java @@ -18,12 +18,14 @@ */ package org.apache.hcatalog.templeton; +import org.eclipse.jetty.http.HttpStatus; + /** * Unable to queue the job */ public class QueueException extends SimpleWebException { public QueueException(String msg) { - super(500, msg); + super(HttpStatus.INTERNAL_SERVER_ERROR_500, msg); } } diff --git hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestWebHCatE2e.java hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestWebHCatE2e.java new file mode 100644 index 0000000..ceb98b5 --- /dev/null +++ hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestWebHCatE2e.java @@ -0,0 +1,273 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.hcatalog.templeton; + +import junit.framework.Assert; +import org.apache.commons.httpclient.HttpClient; +import org.apache.commons.httpclient.HttpMethod; +import org.apache.commons.httpclient.NameValuePair; +import org.apache.commons.httpclient.methods.DeleteMethod; +import org.apache.commons.httpclient.methods.GetMethod; +import org.apache.commons.httpclient.methods.PutMethod; +import org.apache.commons.httpclient.methods.StringRequestEntity; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.eclipse.jetty.http.HttpStatus; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * A set of tests exercising e2e WebHCat DDL APIs. These tests are somewhat + * between WebHCat e2e (hcatalog/src/tests/e2e/templeton) tests and simple + * unit tests. This will start a WebHCat server and make REST calls to it. + * It doesn't need Hadoop or (standalone) metastore to be running. + * Running this is much simpler than e2e tests. + * + * Most of these tests check that HTTP Status code is what is expected and + * Hive Error code {@link org.apache.hadoop.hive.ql.ErrorMsg} is what is + * expected. + * + * It may be possible to extend this to more than just DDL later.
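Aside: a hedged sketch of the client-side check these tests depend on, pulling the errorCode field out of a Templeton JSON error body. The patch's tests do this via JsonBuilder.jsonToMap; this standalone version uses the Jackson 1.x ObjectMapper the module already imports. The payload and class name are illustrative only.

```java
import java.io.IOException;
import java.util.Map;
import org.codehaus.jackson.map.ObjectMapper;

public class ErrorCodeExtractorSketch {
  @SuppressWarnings("unchecked") // JSON object keys are always strings
  static int errorCode(String jsonErrorBody) throws IOException {
    // Parse the error envelope into a generic map and read the errorCode field.
    Map<String, Object> fields = new ObjectMapper().readValue(jsonErrorBody, Map.class);
    Object code = fields.get("errorCode");
    return code == null ? -1 : Integer.parseInt(code.toString());
  }

  public static void main(String[] args) throws IOException {
    // Sample body shaped like the responses asserted on in these tests (not a recorded response).
    String body = "{\"error\":\"Table not found no_such_table\",\"errorCode\":10001}";
    System.out.println(errorCode(body)); // prints 10001
  }
}
```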
+ */ +public class TestWebHCatE2e { + private static final Logger LOG = + LoggerFactory.getLogger(TestWebHCatE2e.class); + private static final String templetonBaseUrl = + "http://localhost:50111/templeton/v1"; + private static final String username = "johndoe"; + private static final String ERROR_CODE = "errorCode"; + private static Main templetonServer; + private static final String charSet = "UTF-8"; + @BeforeClass + public static void startWebHcatInMem() { + templetonServer = new Main(new String[] {"-D" + AppConfig.UNIT_TEST_MODE + "=true"}); + LOG.info("Starting Main"); + templetonServer.run(); + LOG.info("Main started"); + } + @AfterClass + public static void stopWebHcatInMem() { + if(templetonServer != null) { + LOG.info("Stopping Main"); + templetonServer.stop(); + LOG.info("Main stopped"); + } + } + @Test + public void getStatus() throws IOException { + LOG.debug("+getStatus()"); + MethodCallRetVal p = doHttpCall(templetonBaseUrl + "/status", HTTP_METHOD_TYPE.GET); + Assert.assertEquals(p.getAssertMsg(), HttpStatus.OK_200, p.httpStatusCode); + Assert.assertEquals(p.getAssertMsg(), "{\"status\":\"ok\",\"version\":\"v1\"}", p.responseBody); + LOG.debug("-getStatus()"); + } + @Test + public void listDataBases() throws IOException { + LOG.debug("+listDataBases()"); + MethodCallRetVal p = doHttpCall(templetonBaseUrl + "/ddl/database", HTTP_METHOD_TYPE.GET); + Assert.assertEquals(p.getAssertMsg(), HttpStatus.OK_200, p.httpStatusCode); + Assert.assertEquals(p.getAssertMsg(), "{\"databases\":[\"default\"]}", p.responseBody); + LOG.debug("-listDataBases()"); + } + /** + * Check that we return correct status code when the URL doesn't map to any method + * in {@link Server} + */ + @Test + public void invalidPath() throws IOException { + MethodCallRetVal p = doHttpCall(templetonBaseUrl + "/no_such_mapping/database", HTTP_METHOD_TYPE.GET); + Assert.assertEquals(p.getAssertMsg(), HttpStatus.INTERNAL_SERVER_ERROR_500, p.httpStatusCode); + } + /** + * tries to drop table in a DB that doesn't exist + */ + @Test + public void dropTableNoSuchDB() throws IOException { + MethodCallRetVal p = doHttpCall(templetonBaseUrl + + "/ddl/database/no_such_db/table/t1", HTTP_METHOD_TYPE.DELETE); + Assert.assertEquals(p.getAssertMsg(), HttpStatus.NOT_FOUND_404, p.httpStatusCode); + Assert.assertEquals(p.getAssertMsg(), + ErrorMsg.DATABASE_NOT_EXISTS.getErrorCode(), + getErrorCode(p.responseBody)); + } + /** + * tries to drop table in a DB that doesn't exist (with ifExists=true) + */ + @Test + public void dropTableNoSuchDbIfExists() throws IOException { + MethodCallRetVal p = doHttpCall(templetonBaseUrl + "/ddl/database/no_such_db/table/t1", + HTTP_METHOD_TYPE.DELETE, null, new NameValuePair[] + {new NameValuePair("ifExists", "true")}); + Assert.assertEquals(p.getAssertMsg(), HttpStatus.NOT_FOUND_404, p.httpStatusCode); + Assert.assertEquals(p.getAssertMsg(), ErrorMsg.DATABASE_NOT_EXISTS.getErrorCode(), getErrorCode(p.responseBody)); + } + /** + * tries to drop table that doesn't exist (with ifExists=true) + */ + @Test + public void dropTableIfExists() throws IOException { + MethodCallRetVal p = doHttpCall(templetonBaseUrl + "/ddl/database/default/table/no_such_table", + HTTP_METHOD_TYPE.DELETE, null, new NameValuePair[] + {new NameValuePair("ifExists", "true")}); + Assert.assertEquals(p.getAssertMsg(), HttpStatus.OK_200, p.httpStatusCode); + } + + @Test + public void createDataBase() throws IOException { + Map<String, Object> props = new HashMap<String, Object>(); + props.put("comment", "Hello, there"); + props.put("location", "file://" +
System.getProperty("hive.metastore.warehouse.dir")); + Map<String, Object> props2 = new HashMap<String, Object>(); + props2.put("prop", "val"); + props.put("properties", props2); + //{ "comment":"Hello there", "location":"file:///tmp/warehouse", "properties":{"a":"b"}} + MethodCallRetVal p = doHttpCall(templetonBaseUrl + "/ddl/database/newdb", HTTP_METHOD_TYPE.PUT, props, null); + Assert.assertEquals(p.getAssertMsg(), HttpStatus.OK_200, p.httpStatusCode); + } + @Test + public void createTable() throws IOException { + //{ "comment":"test", "columns": [ { "name": "col1", "type": "string" } ], "format": { "storedAs": "rcfile" } } + Map<String, Object> props = new HashMap<String, Object>(); + props.put("comment", "Table in default db"); + Map<String, Object> col = new HashMap<String, Object>(); + col.put("name", "col1"); + col.put("type", "string"); + List<Map<String, Object>> colList = new ArrayList<Map<String, Object>>(1); + colList.add(col); + props.put("columns", colList); + Map<String, Object> format = new HashMap<String, Object>(); + format.put("storedAs", "rcfile"); + props.put("format", format); + MethodCallRetVal createTbl = doHttpCall(templetonBaseUrl + "/ddl/database/default/table/test_table", HTTP_METHOD_TYPE.PUT, props, null); + Assert.assertEquals(createTbl.getAssertMsg(), HttpStatus.OK_200, createTbl.httpStatusCode); + LOG.info("createTable() resp: " + createTbl.responseBody); + + MethodCallRetVal descTbl = doHttpCall(templetonBaseUrl + "/ddl/database/default/table/test_table", HTTP_METHOD_TYPE.GET); + Assert.assertEquals(descTbl.getAssertMsg(), HttpStatus.OK_200, descTbl.httpStatusCode); + } + @Test + public void describeNoSuchTable() throws IOException { + MethodCallRetVal p = doHttpCall(templetonBaseUrl + + "/ddl/database/default/table/no_such_table", HTTP_METHOD_TYPE.GET); + Assert.assertEquals(p.getAssertMsg(), HttpStatus.NOT_FOUND_404, + p.httpStatusCode); + Assert.assertEquals(p.getAssertMsg(), + ErrorMsg.INVALID_TABLE.getErrorCode(), + getErrorCode(p.responseBody)); + } + /** + * It's expected that Templeton returns a properly formatted JSON object when it + * encounters an error. It should have {@code ERROR_CODE} element in it which + * should be the Hive canonical error msg code.
+ * @return the code or -1 if it cannot be found + */ + private static int getErrorCode(String jsonErrorObject) throws IOException { + @SuppressWarnings("unchecked")//JSON key is always a String + Map<String, Object> retProps = JsonBuilder.jsonToMap(jsonErrorObject); + int hiveRetCode = -1; + if(retProps.get(ERROR_CODE) != null) { + hiveRetCode = Integer.parseInt(retProps.get(ERROR_CODE).toString()); + } + return hiveRetCode; + } + /** + * Encapsulates information from HTTP method call + */ + private static class MethodCallRetVal { + private final int httpStatusCode; + private final String responseBody; + private final String submittedURL; + private final String methodName; + private MethodCallRetVal(int httpStatusCode, String responseBody, String submittedURL, String methodName) { + this.httpStatusCode = httpStatusCode; + this.responseBody = responseBody; + this.submittedURL = submittedURL; + this.methodName = methodName; + } + String getAssertMsg() { + return methodName + " " + submittedURL + " " + responseBody; + } + } + private static enum HTTP_METHOD_TYPE {GET, POST, DELETE, PUT} + private static MethodCallRetVal doHttpCall(String uri, HTTP_METHOD_TYPE type) throws IOException { + return doHttpCall(uri, type, null, null); + } + /** + * Does a basic HTTP call and returns Http Status code + response body + * Will add the dummy user query string + */ + private static MethodCallRetVal doHttpCall(String uri, HTTP_METHOD_TYPE type, Map<String, Object> data, NameValuePair[] params) throws IOException { + HttpClient client = new HttpClient(); + HttpMethod method; + switch (type) { + case GET: + method = new GetMethod(uri); + break; + case DELETE: + method = new DeleteMethod(uri); + break; + case PUT: + method = new PutMethod(uri); + if(data == null) { + break; + } + String msgBody = JsonBuilder.mapToJson(data); + LOG.info("Msg Body: " + msgBody); + StringRequestEntity sre = new StringRequestEntity(msgBody, "application/json", charSet); + ((PutMethod)method).setRequestEntity(sre); + break; + default: + throw new IllegalArgumentException("Unsupported method type: " + type); + } + if(params == null) { + method.setQueryString(new NameValuePair[] {new NameValuePair("user.name", username)}); + } + else { + NameValuePair[] newParams = new NameValuePair[params.length + 1]; + System.arraycopy(params, 0, newParams, 1, params.length); + newParams[0] = new NameValuePair("user.name", username); + method.setQueryString(newParams); + } + String actualUri = "no URI"; + try { + actualUri = method.getURI().toString();//should this be escaped string?
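+ // commons-httpclient 3.x pools connections per HttpClient instance; the
+ // finally block below must call releaseConnection() even when executeMethod()
+ // throws, otherwise every failed call leaks its pooled connection.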
+ LOG.debug(type + ": " + method.getURI().getEscapedURI()); + int httpStatus = client.executeMethod(method); + LOG.debug("Http Status Code=" + httpStatus); + String resp = method.getResponseBodyAsString(); + LOG.debug("response: " + resp); + return new MethodCallRetVal(httpStatus, resp, actualUri, method.getName()); + } + catch (IOException ex) { + LOG.error("doHttpCall() failed", ex); + } + finally { + method.releaseConnection(); + } + return new MethodCallRetVal(-1, "Http " + type + " failed; see log file for details", actualUri, method.getName()); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java index a5a867a..6a52e3b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -78,6 +78,9 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.metadata.formatting.JsonMetaDataFormatter; +import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils; +import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter; import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook; @@ -126,6 +129,7 @@ private String errorMessage; private String SQLState; + private Throwable downstreamError; // A limit on the number of threads that can be launched private int maxthreads; @@ -143,6 +147,7 @@ private boolean checkLockManager() { } catch (SemanticException e) { errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage(); SQLState = ErrorMsg.findSQLState(e.getMessage()); + downstreamError = e; console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return false; @@ -483,8 +488,9 @@ public int compile(String command, boolean resetTaskIds) { perfLogger.PerfLogBegin(LOG, PerfLogger.DO_AUTHORIZATION); doAuthorization(sem); } catch (AuthorizationException authExp) { - console.printError("Authorization failed:" + authExp.getMessage() - + ". Use show grant to get more details."); + errorMessage = "Authorization failed:" + authExp.getMessage() + + ". 
Use show grant to get more details."; + console.printError(errorMessage); return 403; } finally { perfLogger.PerfLogEnd(LOG, PerfLogger.DO_AUTHORIZATION); @@ -502,6 +508,7 @@ public int compile(String command, boolean resetTaskIds) { } errorMessage += " " + e.getMessage(); SQLState = error.getSQLState(); + downstreamError = e; console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return error.getErrorCode(); @@ -837,12 +844,14 @@ else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) { } catch (SemanticException e) { errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage(); SQLState = ErrorMsg.findSQLState(e.getMessage()); + downstreamError = e; console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return (10); } catch (LockException e) { errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage(); SQLState = ErrorMsg.findSQLState(e.getMessage()); + downstreamError = e; console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return (10); @@ -869,8 +878,62 @@ private void releaseLocks(List hiveLocks) { } public CommandProcessorResponse run(String command) throws CommandNeedRetryException { + CommandProcessorResponse cpr = runInternal(command); + if(cpr.getResponseCode() == 0) + return cpr; + SessionState ss = SessionState.get(); + if(ss == null) + return cpr; + MetaDataFormatter mdf = MetaDataFormatUtils.getFormatter(ss.getConf()); + if(!(mdf instanceof JsonMetaDataFormatter)) + return cpr; + /*Here we want to encode the error in machine readable way (e.g. JSON) + * Ideally, errorCode would always be set to a canonical error defined in ErrorMsg. + * In practice that is rarely the case, so the messy logic below tries to tease + * out canonical error code if it can. Exclude stack trace from output when + * the error is a specific/expected one. + * It's written to stdout for backward compatibility (WebHCat consumes it).*/ + try { + if(downstreamError == null) { + mdf.error(ss.out, errorMessage, cpr.getResponseCode(), SQLState); + return cpr; + } + ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpr.getResponseCode()); + if(canonicalErr != null && canonicalErr != ErrorMsg.GENERIC_ERROR) { + /*Some HiveExceptions (e.g. SemanticException) don't set + canonical ErrorMsg explicitly, but there is logic + (e.g. #compile()) to find an appropriate canonical error and + return its code as error code. In this case we want to + preserve it for downstream code to interpret*/ + mdf.error(ss.out, errorMessage, cpr.getResponseCode(), SQLState, null); + return cpr; + } + if(downstreamError instanceof HiveException) { + HiveException rc = (HiveException) downstreamError; + mdf.error(ss.out, errorMessage, + rc.getCanonicalErrorMsg().getErrorCode(), SQLState, + rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ? + org.apache.hadoop.util.StringUtils.stringifyException(rc) + : null); + } + else { + ErrorMsg canonicalMsg = + ErrorMsg.getErrorMsg(downstreamError.getMessage()); + mdf.error(ss.out, errorMessage, canonicalMsg.getErrorCode(), + SQLState, org.apache.hadoop.util.StringUtils. 
+ stringifyException(downstreamError)); + } + } + catch(HiveException ex) { + console.printError("Unable to JSON-encode the error", + org.apache.hadoop.util.StringUtils.stringifyException(ex)); + } + return cpr; + } + private CommandProcessorResponse runInternal(String command) throws CommandNeedRetryException { errorMessage = null; SQLState = null; + downstreamError = null; if (!validateConfVariables()) { return new CommandProcessorResponse(12, errorMessage, SQLState); @@ -885,10 +948,11 @@ public CommandProcessorResponse run(String command) throws CommandNeedRetryExcep driverRunHook.preDriverRun(hookContext); } } catch (Exception e) { - errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e) - + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e); + errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); SQLState = ErrorMsg.findSQLState(e.getMessage()); - console.printError(errorMessage); + downstreamError = e; + console.printError(errorMessage + "\n" + + org.apache.hadoop.util.StringUtils.stringifyException(e)); return new CommandProcessorResponse(12, errorMessage, SQLState); } @@ -961,10 +1025,11 @@ public CommandProcessorResponse run(String command) throws CommandNeedRetryExcep driverRunHook.postDriverRun(hookContext); } } catch (Exception e) { - errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e) - + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e); + errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); SQLState = ErrorMsg.findSQLState(e.getMessage()); - console.printError(errorMessage); + downstreamError = e; + console.printError(errorMessage + "\n" + + org.apache.hadoop.util.StringUtils.stringifyException(e)); return new CommandProcessorResponse(12, errorMessage, SQLState); } @@ -984,7 +1049,7 @@ private boolean validateConfVariables() { .getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_UNION_REMOVE))))) { errorMessage = "FAILED: Hive Internal Error: " + ErrorMsg.SUPPORT_DIR_MUST_TRUE_FOR_LIST_BUCKETING.getMsg(); - SQLState = ErrorMsg.findSQLState(errorMessage); + SQLState = ErrorMsg.SUPPORT_DIR_MUST_TRUE_FOR_LIST_BUCKETING.getSQLState(); console.printError(errorMessage + "\n"); valid = false; } @@ -1158,12 +1223,7 @@ public int execute() throws CommandNeedRetryException { } Task backupTask = tsk.getAndInitBackupTask(); if (backupTask != null) { - errorMessage = "FAILED: Execution Error, return code " + exitVal + " from " - + tsk.getClass().getName(); - ErrorMsg em = ErrorMsg.getErrorMsg(exitVal); - if (em != null) { - errorMessage += ". " + em.getMsg(); - } + setErrorMsgAndDetail(exitVal, tskRes.getTaskError(), tsk); console.printError(errorMessage); errorMessage = "ATTEMPT: Execute BackupTask: " + backupTask.getClass().getName(); console.printError(errorMessage); @@ -1184,13 +1244,7 @@ public int execute() throws CommandNeedRetryException { perfLogger.PerfLogEnd(LOG, PerfLogger.FAILURE_HOOK + ofh.getClass().getName()); } - - errorMessage = "FAILED: Execution Error, return code " + exitVal + " from " - + tsk.getClass().getName(); - ErrorMsg em = ErrorMsg.getErrorMsg(exitVal); - if (em != null) { - errorMessage += ". 
" + em.getMsg(); - } + setErrorMsgAndDetail(exitVal, tskRes.getTaskError(), tsk); SQLState = "08S01"; console.printError(errorMessage); if (!running.isEmpty()) { @@ -1273,6 +1327,7 @@ public int execute() throws CommandNeedRetryException { // TODO: do better with handling types of Exception here errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); SQLState = "08S01"; + downstreamError = e; console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return (12); @@ -1308,7 +1363,21 @@ public int execute() throws CommandNeedRetryException { return (0); } - + private void setErrorMsgAndDetail(int exitVal, Throwable downstreamError, Task tsk) { + this.downstreamError = downstreamError; + errorMessage = "FAILED: Execution Error, return code " + exitVal + " from " + tsk.getClass().getName(); + if(downstreamError != null) { + //here we assume that upstream code may have parametrized the msg from ErrorMsg + //so we want to keep it + errorMessage += ". " + downstreamError.getMessage(); + } + else { + ErrorMsg em = ErrorMsg.getErrorMsg(exitVal); + if (em != null) { + errorMessage += ". " + em.getMsg(); + } + } + } /** * Launches a new task * @@ -1388,7 +1457,7 @@ public TaskResult pollTasks(Set results) { while (true) { while (resultIterator.hasNext()) { TaskResult tskRes = resultIterator.next(); - if (tskRes.isRunning() == false) { + if (!tskRes.isRunning()) { return tskRes; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java index c796770..8330f65 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java +++ ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java @@ -32,11 +32,15 @@ /** * List of all error messages. * This list contains both compile time and run-time errors. - **/ + * + * This class supports parametrized messages such as (@link #TRUNCATE_FOR_NON_MANAGED_TABLE}. These are + * preferable over un-parametrized ones where arbitrary String is appended to the end of the message, + * for example {@link #getMsg(String)} and {@link #INVALID_TABLE}. + */ public enum ErrorMsg { // The error codes are Hive-specific and partitioned into the following ranges: - // 10000 to 19999: Errors occuring during semantic analysis and compilation of the query. + // 10000 to 19999: Errors occurring during semantic analysis and compilation of the query. // 20000 to 29999: Runtime errors where Hive believes that retries are unlikely to succeed. // 30000 to 39999: Runtime errors which Hive thinks may be transient and retrying may succeed. // 40000 to 49999: Errors where Hive is unable to advise about retries. @@ -351,6 +355,12 @@ TRUNCATE_LIST_BUCKETED_COLUMN(10240, "A column on which a partition/table is list bucketed cannot be truncated."), + TABLE_NOT_PARTITIONED(10241, "Table {0} is not a partitioned table", true), + DATABSAE_ALREADY_EXISTS(10242, "Database {0} already exists", true), + CANNOT_REPLACE_COLUMNS(10243, "Replace columns is not supported for table {0}. SerDe may be incompatible.", true), + BAD_LOCATION_VALUE(10244, "{0} is not absolute or has no scheme information. Please specify a complete absolute uri with scheme information."), + UNSUPPORTED_ALTER_TBL_OP(10245, "{0} alter table options is not supported"), + SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."), SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. 
" + "It may have crashed with an error."), @@ -488,7 +498,7 @@ public static ErrorMsg getErrorMsg(int errorCode) { /** * For a given error message string, searches for a ErrorMsg enum - * that appears to be a match. If an match is found, returns the + * that appears to be a match. If a match is found, returns the * SQLState associated with the ErrorMsg. If a match * is not found or ErrorMsg has no SQLState, returns * the SQLState bound to the GENERIC_ERROR @@ -605,10 +615,41 @@ public String getMsg(String reason) { public String format(String reason) { return format(new String[]{reason}); } - + /** + * If the message is parametrized, this will fill the parameters with supplied + * {@code reasons}, otherwise {@code reasons} are appended at the end of the + * message. + */ public String format(String... reasons) { - assert format != null; - return format.format(reasons); + /* Not all messages are parametrized even those that should have been, e.g {@link #INVALID_TABLE}. + INVALID_TABLE is usually used with {@link #getMsg(String)}. + This method can also be used with INVALID_TABLE and the like and will match getMsg(String) behavior. + + Another example: {@link #INVALID_PARTITION}. Ideally you want the message to have 2 parameters one for + partition name one for table name. Since this is already defined w/o any parameters, one can still call + {@code INVALID_PARTITION.format(" "}. This way the message text will be slightly + different but at least the errorCode will match. Note this, should not be abused by adding anything other + than what should have been parameter names to keep msg text standardized. + */ + if(reasons == null || reasons.length == 0) { + return getMsg(); + } + if(format != null) { + return format.format(reasons); + } + if(reasons.length > 1) { + StringBuilder sb = new StringBuilder(); + for(String re : reasons) { + if(re != null) { + if(sb.length() > 0) { + sb.append(" "); + } + sb.append(re); + } + } + return getMsg(sb.toString()); + } + return getMsg(reasons[0]); } public String getErrorCodedMsg() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 295daab..9883659 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -80,6 +80,7 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.exec.ArchiveUtils.PartSpecInfo; import org.apache.hadoop.hive.ql.hooks.ReadEntity; @@ -102,10 +103,8 @@ import org.apache.hadoop.hive.ql.metadata.InvalidTableException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.metadata.formatting.JsonMetaDataFormatter; import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils; import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter; -import org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataFormatter; import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; @@ -205,14 +204,7 @@ public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) { // Pick the formatter to use to display the results. 
Either the // normal human readable output or a json object. - if ("json".equals(conf.get( - HiveConf.ConfVars.HIVE_DDL_OUTPUT_FORMAT.varname, "text"))) { - formatter = new JsonMetaDataFormatter(); - } else { - formatter = new TextMetaDataFormatter( - conf.getIntVar(HiveConf.ConfVars.CLIPRETTYOUTPUTNUMCOLS)); - } - + formatter = MetaDataFormatUtils.getFormatter(conf); INTERMEDIATE_ARCHIVED_DIR_SUFFIX = HiveConf.getVar(conf, ConfVars.METASTORE_INT_ARCHIVED); INTERMEDIATE_ORIGINAL_DIR_SUFFIX = @@ -437,32 +429,10 @@ public int execute(DriverContext driverContext) { if (alterTableExchangePartition != null) { return exchangeTablePartition(db, alterTableExchangePartition); } - - } catch (InvalidTableException e) { - formatter.consoleError(console, "Table " + e.getTableName() + " does not exist", - formatter.MISSING); - LOG.debug(stringifyException(e)); - return 1; - } catch (AlreadyExistsException e) { - formatter.consoleError(console, e.getMessage(), formatter.CONFLICT); - return 1; - } catch (NoSuchObjectException e) { - formatter.consoleError(console, e.getMessage(), - "\n" + stringifyException(e), - formatter.MISSING); + } catch (Throwable e) { + setException(e); + LOG.error(stringifyException(e)); return 1; - } catch (HiveException e) { - formatter.consoleError(console, - "FAILED: Error in metadata: " + e.getMessage(), - "\n" + stringifyException(e), - formatter.ERROR); - LOG.debug(stringifyException(e)); - return 1; - } catch (Exception e) { - formatter.consoleError(console, "Failed with exception " + e.getMessage(), - "\n" + stringifyException(e), - formatter.ERROR); - return (1); } assert false; return 0; @@ -881,7 +851,7 @@ private int alterDatabase(AlterDatabaseDesc alterDbDesc) throws HiveException { } db.alterDatabase(database.getName(), database); } else { - throw new HiveException("ERROR: The database " + dbName + " does not exist."); + throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName); } return 0; } @@ -1000,16 +970,6 @@ private int alterIndex(Hive db, AlterIndexDesc alterIndex) throws HiveException * @return Returns 0 when execution succeeds and above 0 if it fails. * @throws HiveException */ - /** - * Add a partition to a table. - * - * @param db - * Database to add the partition to. - * @param addPartitionDesc - * Add this partition. - * @return Returns 0 when execution succeeds and above 0 if it fails. 
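Aside: a hedged, standalone model of the two paths the new ErrorMsg.format(String...) above takes, MessageFormat for parametrized messages and append-the-reasons for legacy unparametrized ones. The templates and message texts below are placeholders, not the real ErrorMsg entries.

```java
import java.text.MessageFormat;

public class ErrorMsgFormatSketch {
  // template is the parametrized pattern (or null for legacy messages);
  // baseMsg plays the role of the enum's fixed message text.
  static String format(String template, String baseMsg, String... reasons) {
    if (reasons == null || reasons.length == 0) {
      return baseMsg; // nothing to substitute or append
    }
    if (template != null) {
      return new MessageFormat(template).format(reasons); // parametrized path
    }
    // Legacy path: join the reasons and append them, like getMsg(reason).
    StringBuilder sb = new StringBuilder();
    for (String r : reasons) {
      if (r != null) {
        if (sb.length() > 0) sb.append(" ");
        sb.append(r);
      }
    }
    return baseMsg + " " + sb;
  }

  public static void main(String[] args) {
    // parametrized, like TABLE_NOT_PARTITIONED(10241, "Table {0} is not a partitioned table", true)
    System.out.println(format("Table {0} is not a partitioned table", null, "t1"));
    // unparametrized, like INVALID_TABLE: reasons are appended to the fixed text
    System.out.println(format(null, "Table not found", "default.t1"));
  }
}
```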
- * @throws HiveException - */ private int addPartition(Hive db, AddPartitionDesc addPartitionDesc) throws HiveException { Table tbl = db.getTable(addPartitionDesc.getDbName(), addPartitionDesc.getTableName()); @@ -1118,7 +1078,7 @@ private int alterTableAlterPart(Hive db, AlterTableAlterPartDesc alterPartitionD try { db.alterTable(tabName, tbl); } catch (InvalidOperationException e) { - throw new HiveException("Uable to update table"); + throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to alter " + tabName); } work.getInputs().add(new ReadEntity(tbl)); @@ -1878,10 +1838,7 @@ private int showPartitions(Hive db, ShowPartitionsDesc showParts) throws HiveExc tbl = db.getTable(tabName); if (!tbl.isPartitioned()) { - formatter.consoleError(console, - "Table " + tabName + " is not a partitioned table", - formatter.ERROR); - return 1; + throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tabName); } if (showParts.getPartSpec() != null) { parts = db.getPartitionNames(tbl.getDbName(), @@ -1899,20 +1856,12 @@ private int showPartitions(Hive db, ShowPartitionsDesc showParts) throws HiveExc formatter.showTablePartitons(outStream, parts); - ((FSDataOutputStream) outStream).close(); + outStream.close(); outStream = null; - } catch (FileNotFoundException e) { - formatter.logWarn(outStream, "show partitions: " + stringifyException(e), - MetaDataFormatter.ERROR); - return 1; - } catch (IOException e) { - formatter.logWarn(outStream, "show partitions: " + stringifyException(e), - MetaDataFormatter.ERROR); - return 1; } catch (Exception e) { - throw new HiveException(e); + throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "show partitions for table " + tabName); } finally { - IOUtils.closeStream((FSDataOutputStream) outStream); + IOUtils.closeStream(outStream); } return 0; @@ -2212,7 +2161,7 @@ private int showIndexes(Hive db, ShowIndexesDesc showIndexes) throws HiveExcepti /** * Write a list of the available databases to a file. * - * @param showDatabases + * @param showDatabasesDesc * These are the databases we're interested in. * @return Returns 0 when execution succeeds and above 0 if it fails. 
* @throws HiveException @@ -2237,20 +2186,12 @@ private int showDatabases(Hive db, ShowDatabasesDesc showDatabasesDesc) throws H outStream = fs.create(resFile); formatter.showDatabases(outStream, databases); - ((FSDataOutputStream) outStream).close(); + outStream.close(); outStream = null; - } catch (FileNotFoundException e) { - formatter.logWarn(outStream, "show databases: " + stringifyException(e), - formatter.ERROR); - return 1; - } catch (IOException e) { - formatter.logWarn(outStream, "show databases: " + stringifyException(e), - formatter.ERROR); - return 1; } catch (Exception e) { - throw new HiveException(e.toString()); + throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "show databases"); } finally { - IOUtils.closeStream((FSDataOutputStream) outStream); + IOUtils.closeStream(outStream); } return 0; } @@ -2272,8 +2213,7 @@ private int showTables(Hive db, ShowTablesDesc showTbls) throws HiveException { String dbName = showTbls.getDbName(); if (!db.databaseExists(dbName)) { - throw new HiveException("ERROR: The database " + dbName + " does not exist."); - + throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName); } if (showTbls.getPattern() != null) { LOG.info("pattern: " + showTbls.getPattern()); @@ -2292,20 +2232,12 @@ private int showTables(Hive db, ShowTablesDesc showTbls) throws HiveException { SortedSet sortedTbls = new TreeSet(tbls); formatter.showTables(outStream, sortedTbls); - ((FSDataOutputStream) outStream).close(); + outStream.close(); outStream = null; - } catch (FileNotFoundException e) { - formatter.logWarn(outStream, "show table: " + stringifyException(e), - formatter.ERROR); - return 1; - } catch (IOException e) { - formatter.logWarn(outStream, "show table: " + stringifyException(e), - formatter.ERROR); - return 1; } catch (Exception e) { - throw new HiveException(e.toString()); + throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "in database" + dbName); } finally { - IOUtils.closeStream((FSDataOutputStream) outStream); + IOUtils.closeStream(outStream); } return 0; } @@ -2337,8 +2269,7 @@ public int showColumns(Hive db, ShowColumnsDesc showCols) ((FSDataOutputStream) outStream).close(); outStream = null; } catch (IOException e) { - LOG.warn("show columns: " + stringifyException(e)); - return 1; + throw new HiveException(e, ErrorMsg.GENERIC_ERROR); } finally { IOUtils.closeStream((FSDataOutputStream) outStream); } @@ -2668,9 +2599,7 @@ private int descDatabase(DescDatabaseDesc descDatabase) throws HiveException { Database database = db.getDatabase(descDatabase.getDatabaseName()); if (database == null) { - formatter.error(outStream, - "No such database: " + descDatabase.getDatabaseName(), - formatter.MISSING); + throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, descDatabase.getDatabaseName()); } else { Map params = null; if(descDatabase.isExt()) { @@ -2683,22 +2612,12 @@ private int descDatabase(DescDatabaseDesc descDatabase) throws HiveException { database.getLocationUri(), params); } - ((FSDataOutputStream) outStream).close(); + outStream.close(); outStream = null; - } catch (FileNotFoundException e) { - formatter.logWarn(outStream, - "describe database: " + stringifyException(e), - formatter.ERROR); - return 1; } catch (IOException e) { - formatter.logWarn(outStream, - "describe database: " + stringifyException(e), - formatter.ERROR); - return 1; - } catch (Exception e) { - throw new HiveException(e.toString()); + throw new HiveException(e, ErrorMsg.GENERIC_ERROR); } finally { - IOUtils.closeStream((FSDataOutputStream) outStream); + 
IOUtils.closeStream(outStream); } return 0; } @@ -2713,7 +2632,7 @@ private int descDatabase(DescDatabaseDesc descDatabase) throws HiveException { * @return Return 0 when execution succeeds and above 0 if it fails. */ private int showTableStatus(Hive db, ShowTableStatusDesc showTblStatus) throws HiveException { - // get the tables for the desired pattenn - populate the output stream + // get the tables for the desired pattern - populate the output stream List
<Table> tbls = new ArrayList<Table>
();
       Map<String, String> part = showTblStatus.getPartSpec();
       Partition par = null;
@@ -2749,20 +2668,12 @@ private int showTableStatus(Hive db, ShowTableStatusDesc showTblStatus) throws H
       formatter.showTableStatus(outStream, db, conf, tbls, part, par);
-      ((FSDataOutputStream) outStream).close();
+      outStream.close();
       outStream = null;
-    } catch (FileNotFoundException e) {
-      formatter.logInfo(outStream, "show table status: " + stringifyException(e),
-                        formatter.ERROR);
-      return 1;
-    } catch (IOException e) {
-      formatter.logInfo(outStream, "show table status: " + stringifyException(e),
-                        formatter.ERROR);
-      return 1;
     } catch (Exception e) {
-      throw new HiveException(e);
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "show table status");
     } finally {
-      IOUtils.closeStream((FSDataOutputStream) outStream);
+      IOUtils.closeStream(outStream);
     }
     return 0;
   }
@@ -2835,7 +2746,6 @@ private int showTableProperties(Hive db, ShowTblPropertiesDesc showTblPrpt) thro
     return 0;
   }
-
   /**
    * Write the description of a table to a file.
    *
@@ -2860,36 +2770,26 @@ private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException {
       if (tbl == null) {
         FileSystem fs = resFile.getFileSystem(conf);
         outStream = fs.create(resFile);
-        String errMsg = "Table " + tableName + " does not exist";
-        formatter.error(outStream, errMsg, formatter.MISSING);
-        ((FSDataOutputStream) outStream).close();
+        outStream.close();
         outStream = null;
-        return 0;
+        throw new HiveException(ErrorMsg.INVALID_TABLE, tableName);
       }
       if (descTbl.getPartSpec() != null) {
         part = db.getPartition(tbl, descTbl.getPartSpec(), false);
         if (part == null) {
           FileSystem fs = resFile.getFileSystem(conf);
           outStream = fs.create(resFile);
-          String errMsg = "Partition " + descTbl.getPartSpec() + " for table "
-              + tableName + " does not exist";
-          formatter.error(outStream, errMsg, formatter.MISSING);
-          ((FSDataOutputStream) outStream).close();
+          outStream.close();
           outStream = null;
-          return 0;
+          throw new HiveException(ErrorMsg.INVALID_PARTITION,
+              StringUtils.join(descTbl.getPartSpec().keySet(), ','), tableName);
         }
         tbl = part.getTable();
       }
-    } catch (FileNotFoundException e) {
-      formatter.logInfo(outStream, "describe table: " + stringifyException(e),
-                        formatter.ERROR);
-      return 1;
     } catch (IOException e) {
-      formatter.logInfo(outStream, "describe table: " + stringifyException(e),
-                        formatter.ERROR);
-      return 1;
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, tableName);
     } finally {
-      IOUtils.closeStream((FSDataOutputStream) outStream);
+      IOUtils.closeStream(outStream);
     }
     try {
@@ -2916,21 +2816,13 @@ private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException {
           descTbl.isFormatted(), descTbl.isExt(), descTbl.isPretty());
       LOG.info("DDLTask: written data for " + tbl.getTableName());
-      ((FSDataOutputStream) outStream).close();
+      outStream.close();
       outStream = null;
-    } catch (FileNotFoundException e) {
-      formatter.logInfo(outStream, "describe table: " + stringifyException(e),
-                        formatter.ERROR);
-      return 1;
     } catch (IOException e) {
-      formatter.logInfo(outStream, "describe table: " + stringifyException(e),
-                        formatter.ERROR);
-      return 1;
-    } catch (Exception e) {
-      throw new HiveException(e);
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, tableName);
     } finally {
-      IOUtils.closeStream((FSDataOutputStream) outStream);
+      IOUtils.closeStream(outStream);
     }
     return 0;
@@ -3009,11 +2901,8 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
       if (alterTbl.getOp() != AlterTableDesc.AlterTableTypes.ALTERPROTECTMODE) {
         part = db.getPartition(tbl, alterTbl.getPartSpec(), false);
         if (part == null) {
-          formatter.consoleError(console,
-              "Partition : " + alterTbl.getPartSpec().toString()
-              + " does not exist.",
-              formatter.MISSING);
-          return 1;
+          throw new HiveException(ErrorMsg.INVALID_PARTITION,
+              StringUtils.join(alterTbl.getPartSpec().keySet(), ',') + " for table " + alterTbl.getOldName());
         }
       }
       else {
@@ -3044,10 +2933,7 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
       while (iterOldCols.hasNext()) {
         String oldColName = iterOldCols.next().getName();
         if (oldColName.equalsIgnoreCase(newColName)) {
-          formatter.consoleError(console,
-              "Column '" + newColName + "' exists",
-              formatter.CONFLICT);
-          return 1;
+          throw new HiveException(ErrorMsg.DUPLICATE_COLUMN_NAMES, newColName);
         }
       }
       oldCols.add(newCol);
@@ -3078,10 +2964,7 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
         String oldColName = col.getName();
         if (oldColName.equalsIgnoreCase(newName)
            && !oldColName.equalsIgnoreCase(oldName)) {
-          formatter.consoleError(console,
-              "Column '" + newName + "' exists",
-              formatter.CONFLICT);
-          return 1;
+          throw new HiveException(ErrorMsg.DUPLICATE_COLUMN_NAMES, newName);
         } else if (oldColName.equalsIgnoreCase(oldName)) {
           col.setName(newName);
           if (type != null && !type.trim().equals("")) {
@@ -3108,17 +2991,11 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
       // did not find the column
       if (!found) {
-        formatter.consoleError(console,
-            "Column '" + oldName + "' does not exists",
-            formatter.MISSING);
-        return 1;
+        throw new HiveException(ErrorMsg.INVALID_COLUMN, oldName);
       }
       // after column is not null, but we did not find it.
       if ((afterCol != null && !afterCol.trim().equals("")) && position < 0) {
-        formatter.consoleError(console,
-            "Column '" + afterCol + "' does not exists",
-            formatter.MISSING);
-        return 1;
+        throw new HiveException(ErrorMsg.INVALID_COLUMN, afterCol);
       }
       if (position >= 0) {
@@ -3139,11 +3016,7 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
          && !tbl.getSerializationLib().equals(LazySimpleSerDe.class.getName())
          && !tbl.getSerializationLib().equals(ColumnarSerDe.class.getName())
          && !tbl.getSerializationLib().equals(DynamicSerDe.class.getName())) {
-        formatter.consoleError(console,
-            "Replace columns is not supported for this table. "
-            + "SerDe may be incompatible.",
-            formatter.ERROR);
-        return 1;
+        throw new HiveException(ErrorMsg.CANNOT_REPLACE_COLUMNS, alterTbl.getOldName());
       }
       tbl.getTTable().getSd().setCols(alterTbl.getNewCols());
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) {
@@ -3242,10 +3115,7 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
         URI locUri = new URI(newLocation);
         if (!locUri.isAbsolute() || locUri.getScheme() == null
            || locUri.getScheme().trim().equals("")) {
-          throw new HiveException(
-              newLocation
-              + " is not absolute or has no scheme information. "
-              + "Please specify a complete absolute uri with scheme information.");
+          throw new HiveException(ErrorMsg.BAD_LOCATION_VALUE, newLocation);
         }
         if (part != null) {
           part.setLocation(newLocation);
@@ -3313,34 +3183,18 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
         tbl.setNumBuckets(alterTbl.getNumberBuckets());
       }
     } else {
-      formatter.consoleError(console,
-          "Unsupported Alter commnad",
-          formatter.ERROR);
-      return 1;
+      throw new HiveException(ErrorMsg.UNSUPPORTED_ALTER_TBL_OP, alterTbl.getOp().toString());
     }
     if (part == null && allPartitions == null) {
-      if (!updateModifiedParameters(tbl.getTTable().getParameters(), conf)) {
-        return 1;
-      }
-      try {
-        tbl.checkValidity();
-      } catch (HiveException e) {
-        formatter.consoleError(console,
-            "Invalid table columns : " + e.getMessage(),
-            formatter.ERROR);
-        return 1;
-      }
+      updateModifiedParameters(tbl.getTTable().getParameters(), conf);
+      tbl.checkValidity();
     } else if (part != null) {
-      if (!updateModifiedParameters(part.getParameters(), conf)) {
-        return 1;
-      }
+      updateModifiedParameters(part.getParameters(), conf);
     } else {
       for (Partition tmpPart: allPartitions) {
-        if (!updateModifiedParameters(tmpPart.getParameters(), conf)) {
-          return 1;
-        }
+        updateModifiedParameters(tmpPart.getParameters(), conf);
       }
     }
@@ -3354,11 +3208,8 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
         db.alterPartitions(tbl.getTableName(), allPartitions);
       }
     } catch (InvalidOperationException e) {
-      console.printError("Invalid alter operation: " + e.getMessage());
       LOG.info("alter table: " + stringifyException(e));
-      return 1;
-    } catch (HiveException e) {
-      return 1;
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
     }
     // This is kind of hacky - the read entity contains the old table, whereas
@@ -3525,16 +3376,12 @@ private int dropTable(Hive db, DropTableDesc dropTbl)
    * @param user
    *          user that is doing the updating.
    */
-  private boolean updateModifiedParameters(Map<String, String> params, HiveConf conf) {
+  private boolean updateModifiedParameters(Map<String, String> params, HiveConf conf) throws HiveException {
     String user = null;
     try {
       user = conf.getUser();
     } catch (IOException e) {
-      formatter.consoleError(console,
-          "Unable to get current user: " + e.getMessage(),
-          stringifyException(e),
-          formatter.ERROR);
-      return false;
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to get current user");
     }
     params.put("last_modified_by", user);
@@ -3562,17 +3409,21 @@ private void validateSerDe(String serdeName) throws HiveException {
    * @param crtDb
    * @return Always returns 0
    * @throws HiveException
-   * @throws AlreadyExistsException
    */
   private int createDatabase(Hive db, CreateDatabaseDesc crtDb)
-      throws HiveException, AlreadyExistsException {
+      throws HiveException {
     Database database = new Database();
     database.setName(crtDb.getName());
     database.setDescription(crtDb.getComment());
     database.setLocationUri(crtDb.getLocationUri());
     database.setParameters(crtDb.getDatabaseProperties());
-
-    db.createDatabase(database, crtDb.getIfNotExists());
+    try {
+      db.createDatabase(database, crtDb.getIfNotExists());
+    }
+    catch (AlreadyExistsException ex) {
+      //it would be better if AlreadyExistsException had an errorCode field....
+      throw new HiveException(ex, ErrorMsg.DATABSAE_ALREADY_EXISTS, crtDb.getName());
+    }
     return 0;
   }
@@ -3582,11 +3433,15 @@ private int createDatabase(Hive db, CreateDatabaseDesc crtDb)
    * @param dropDb
    * @return Always returns 0
    * @throws HiveException
-   * @throws NoSuchObjectException
   */
   private int dropDatabase(Hive db, DropDatabaseDesc dropDb)
-      throws HiveException, NoSuchObjectException {
-    db.dropDatabase(dropDb.getDatabaseName(), true, dropDb.getIfExists(), dropDb.isCasdade());
+      throws HiveException {
+    try {
+      db.dropDatabase(dropDb.getDatabaseName(), true, dropDb.getIfExists(), dropDb.isCasdade());
+    }
+    catch (NoSuchObjectException ex) {
+      throw new HiveException(ex, ErrorMsg.DATABASE_NOT_EXISTS, dropDb.getDatabaseName());
+    }
     return 0;
   }
@@ -3601,7 +3456,7 @@ private int switchDatabase(Hive db, SwitchDatabaseDesc switchDb)
       throws HiveException {
     String dbName = switchDb.getDatabaseName();
     if (!db.databaseExists(dbName)) {
-      throw new HiveException("ERROR: The database " + dbName + " does not exist.");
+      throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
     }
     db.setCurrentDatabase(dbName);
@@ -3988,7 +3843,7 @@ private int truncateTable(Hive db, TruncateTableDesc truncateTableDesc) throws H
           fs.mkdirs(location);
         }
       } catch (Exception e) {
-        throw new HiveException(e);
+        throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
       }
     return 0;
   }
@@ -4023,15 +3878,11 @@ private int exchangeTablePartition(Hive db,
     return locations;
   }
-  private int setGenericTableAttributes(Table tbl) {
+  private int setGenericTableAttributes(Table tbl) throws HiveException {
     try {
       tbl.setOwner(conf.getUser());
     } catch (IOException e) {
-      formatter.consoleError(console,
-          "Unable to get current user: " + e.getMessage(),
-          stringifyException(e),
-          formatter.ERROR);
-      return 1;
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to get current user");
     }
     // set create time
     tbl.setCreateTime((int) (System.currentTimeMillis() / 1000));
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
index 17387a9..9c49190 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
@@ -95,6 +95,12 @@ protected List<Task<? extends Serializable>> childTasks;
 protected List<Task<? extends Serializable>> parentTasks;
+  /**
+   * this can be set by the Task, to provide more info about the failure in TaskResult
+   * where the Driver can find it. This is checked if {@link Task#execute(org.apache.hadoop.hive.ql.DriverContext)}
+   * returns non-0 code.
+   */
+  private Throwable exception;
 public Task() {
   isdone = false;
@@ -494,4 +500,10 @@ public void shutdown() {
 public List<FieldSchema> getResultSchema() {
   return null;
 }
+  Throwable getException() {
+    return exception;
+  }
+  void setException(Throwable ex) {
+    exception = ex;
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/TaskResult.java ql/src/java/org/apache/hadoop/hive/ql/exec/TaskResult.java
index 26d2c33..def9389 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/TaskResult.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/TaskResult.java
@@ -20,11 +20,13 @@
 /**
  * TaskResult implementation.
+ * Note that different threads may be reading/writing this object
  **/
 public class TaskResult {
-  protected int exitVal;
-  protected boolean runStatus;
+  protected volatile int exitVal;
+  protected volatile boolean runStatus;
+  private volatile Throwable taskError;
   public TaskResult() {
     exitVal = -1;
@@ -35,11 +37,21 @@ public void setExitVal(int exitVal) {
     this.exitVal = exitVal;
     setRunning(false);
   }
+  public void setExitVal(int exitVal, Throwable taskError) {
+    this.setExitVal(exitVal);
+    this.taskError = taskError;
+  }
   public int getExitVal() {
     return exitVal;
   }
+  /**
+   * @return may contain details of the error which caused the task to fail, or null
+   */
+  public Throwable getTaskError() {
+    return taskError;
+  }
   public boolean isRunning() {
     return runStatus;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
index fcf9adc..ead7b59 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
@@ -66,7 +66,7 @@ public void runSequential() {
   } catch (Throwable t) {
     t.printStackTrace();
   }
-  result.setExitVal(exitVal);
+  result.setExitVal(exitVal, tsk.getException());
 }
 public static long getTaskRunnerID () {
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 17daaa1..2fda22d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -950,7 +950,7 @@ public Table getTable(final String dbName, final String tableName,
     } catch (NoSuchObjectException e) {
       if (throwException) {
         LOG.error(StringUtils.stringifyException(e));
-        throw new InvalidTableException("Table " + tableName + " not found ", tableName);
+        throw new InvalidTableException(tableName);
       }
       return null;
     } catch (Exception e) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveException.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveException.java
index f544034..1d895ca 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveException.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveException.java
@@ -18,11 +18,17 @@
 package org.apache.hadoop.hive.ql.metadata;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+
 /**
  * Generic exception class for Hive.
  */
 public class HiveException extends Exception {
+  /**
+   * Standard predefined message with error code and possibly SQL State, etc.
+   */
+  private ErrorMsg canonicalErrorMsg = ErrorMsg.GENERIC_ERROR;
   public HiveException() {
     super();
   }
@@ -38,4 +44,26 @@ public HiveException(Throwable cause) {
   public HiveException(String message, Throwable cause) {
     super(message, cause);
   }
+
+  public HiveException(ErrorMsg message, String... msgArgs) {
+    this(null, message, msgArgs);
+  }
+
+  /**
+   * This is the recommended constructor to use since it helps use
+   * canonical messages throughout.
+   * @param errorMsg Canonical error message
+   * @param msgArgs message arguments if message is parametrized; must be {@code null} if message takes no arguments
+   */
+  public HiveException(Throwable cause, ErrorMsg errorMsg, String... msgArgs) {
+    super(errorMsg.format(msgArgs), cause);
+    canonicalErrorMsg = errorMsg;
+
+  }
+  /**
+   * @return {@link ErrorMsg#GENERIC_ERROR} by default
+   */
+  public ErrorMsg getCanonicalErrorMsg() {
+    return canonicalErrorMsg;
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/InvalidTableException.java ql/src/java/org/apache/hadoop/hive/ql/metadata/InvalidTableException.java
index 10d00e7..ebf37e2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/InvalidTableException.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/InvalidTableException.java
@@ -18,31 +18,17 @@
 package org.apache.hadoop.hive.ql.metadata;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+
 /**
  * Generic exception class for Hive.
- *
  */
 public class InvalidTableException extends HiveException {
   String tableName;
   public InvalidTableException(String tableName) {
-    super();
-    this.tableName = tableName;
-  }
-
-  public InvalidTableException(String message, String tableName) {
-    super(message);
-    this.tableName = tableName;
-  }
-
-  public InvalidTableException(Throwable cause, String tableName) {
-    super(cause);
-    this.tableName = tableName;
-  }
-
-  public InvalidTableException(String message, Throwable cause, String tableName) {
-    super(message, cause);
+    super(ErrorMsg.INVALID_TABLE, tableName);
     this.tableName = tableName;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index e24f5f9..a85a19d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -48,12 +48,12 @@
  * json.
  */
 public class JsonMetaDataFormatter implements MetaDataFormatter {
-  private static final Log LOG = LogFactory.getLog("hive.ql.exec.DDLTask");
+  private static final Log LOG = LogFactory.getLog(JsonMetaDataFormatter.class);
   /**
    * Convert the map to a JSON string.
    */
-  public void asJson(OutputStream out, Map<String, Object> data)
+  private void asJson(OutputStream out, Map<String, Object> data)
       throws HiveException
   {
     try {
@@ -66,70 +66,29 @@ public void asJson(OutputStream out, Map<String, Object> data)
   /**
    * Write an error message.
    */
-  public void error(OutputStream out, String msg, int errorCode)
+  @Override
+  public void error(OutputStream out, String msg, int errorCode, String sqlState)
       throws HiveException
   {
-    asJson(out,
-        MapBuilder.create()
-        .put("error", msg)
-        .put("errorCode", errorCode)
-        .build());
+    error(out, msg, errorCode, sqlState, null);
   }
-
-  /**
-   * Write a log warn message.
-   */
-  public void logWarn(OutputStream out, String msg, int errorCode)
-      throws HiveException
-  {
-    LOG.warn(msg);
-    error(out, msg, errorCode);
-  }
-
-  /**
-   * Write a log info message.
-   */
-  public void logInfo(OutputStream out, String msg, int errorCode)
-      throws HiveException
-  {
-    LOG.info(msg);
-    error(out, msg, errorCode);
-  }
-
-  /**
-   * Write a console error message.
-   */
-  public void consoleError(LogHelper console, String msg, int errorCode) {
-    try {
-      console.printError(msg);
-      error(console.getOutStream(), msg, errorCode);
-    } catch (HiveException e) {
-      console.printError("unable to create json: " + e);
+  @Override
+  public void error(OutputStream out, String errorMessage, int errorCode, String sqlState, String errorDetail) throws HiveException {
+    MapBuilder mb = MapBuilder.create().put("error", errorMessage);
+    if(errorDetail != null) {
+      mb.put("errorDetail", errorDetail);
     }
-  }
-
-  /**
-   * Write a console error message.
-   */
-  public void consoleError(LogHelper console, String msg, String detail,
-      int errorCode)
-  {
-    try {
-      console.printError(msg, detail);
-      asJson(console.getOutStream(),
-          MapBuilder.create()
-          .put("error", msg)
-          .put("errorDetail", detail)
-          .put("errorCode", errorCode)
-          .build());
-    } catch (HiveException e) {
-      console.printError("unable to create json: " + e);
+    mb.put("errorCode", errorCode);
+    if(sqlState != null) {
+      mb.put("sqlState", sqlState);
     }
+    asJson(out,mb.build());
   }
   /**
    * Show a list of tables.
   */
+  @Override
   public void showTables(DataOutputStream out, Set<String> tables)
       throws HiveException
   {
@@ -142,6 +101,7 @@ public void showTables(DataOutputStream out, Set<String> tables)
   /**
    * Describe table.
   */
+  @Override
   public void describeTable(DataOutputStream out,
       String colPath, String tableName,
       Table tbl, Partition part, List<FieldSchema> cols,
@@ -178,6 +138,7 @@ public void describeTable(DataOutputStream out,
         .build();
   }
+  @Override
   public void showTableStatus(DataOutputStream out,
       Hive db,
      HiveConf conf,
@@ -366,6 +327,7 @@ private void putFileSystemsStats(MapBuilder builder, List<Path> locations,
   /**
    * Show the table partitions.
   */
+  @Override
   public void showTablePartitons(DataOutputStream out, List<String> parts)
       throws HiveException
   {
@@ -424,6 +386,7 @@ public void showTablePartitons(DataOutputStream out, List<String> parts)
   /**
    * Show a list of databases
   */
+  @Override
   public void showDatabases(DataOutputStream out, List<String> databases)
       throws HiveException
   {
@@ -436,6 +399,7 @@ public void showDatabases(DataOutputStream out, List<String> databases)
   /**
    * Show the description of a database
   */
+  @Override
   public void showDatabaseDescription(DataOutputStream out,
       String database,
       String comment,
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
index b08fb76..0d71891 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
@@ -28,6 +28,7 @@ import java.util.Set;
 import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -325,4 +326,11 @@ public static String getIndexColumnsHeader() {
     formatOutput(ShowIndexesDesc.getSchema().split("#")[0].split(","), indexCols);
     return indexCols.toString();
   }
+  public static MetaDataFormatter getFormatter(HiveConf conf) {
+    if ("json".equals(conf.get(HiveConf.ConfVars.HIVE_DDL_OUTPUT_FORMAT.varname, "text"))) {
+      return new JsonMetaDataFormatter();
+    } else {
+      return new TextMetaDataFormatter(conf.getIntVar(HiveConf.ConfVars.CLIPRETTYOUTPUTNUMCOLS));
+    }
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
index 6632afe..4c40034 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
@@ -38,49 +38,18 @@
 */
 public interface MetaDataFormatter {
   /**
-   * Generic error code. This and the other error codes are
-   * designed to match the HTTP status codes.
-   */
-  static final int ERROR = 500;
-
-  /**
-   * Missing error code.
-   */
-  static final int MISSING = 404;
-
-  /**
-   * Conflict error code.
-   */
-  static final int CONFLICT = 409;
-
-  /**
    * Write an error message.
+   * @param sqlState if {@code null}, will be ignored
   */
-  public void error(OutputStream out, String msg, int errorCode)
-      throws HiveException;
-
-  /**
-   * Write a log warn message.
-   */
-  public void logWarn(OutputStream out, String msg, int errorCode)
-      throws HiveException;
-
-  /**
-   * Write a log info message.
-   */
-  public void logInfo(OutputStream out, String msg, int errorCode)
+  public void error(OutputStream out, String msg, int errorCode, String sqlState)
      throws HiveException;
-  /**
-   * Write a console error message.
-   */
-  public void consoleError(LogHelper console, String msg, int errorCode);
-
-  /**
-   * Write a console error message.
-   */
-  public void consoleError(LogHelper console, String msg, String detail,
-      int errorCode);
+  /**
+   * @param sqlState if {@code null}, will be skipped in output
+   * @param errorDetail usually string version of some Exception, if {@code null}, will be ignored
+   */
+  public void error(OutputStream out, String errorMessage, int errorCode, String sqlState, String errorDetail)
+      throws HiveException;
   /**
    * Show a list of tables.
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index 97857f4..ef6e570 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -49,8 +49,8 @@
 * Format table and index information for human readability using
 * simple lines of text.
 */
-public class TextMetaDataFormatter implements MetaDataFormatter {
-  private static final Log LOG = LogFactory.getLog("hive.ql.exec.DDLTask");
+class TextMetaDataFormatter implements MetaDataFormatter {
+  private static final Log LOG = LogFactory.getLog(TextMetaDataFormatter.class);
   private static final int separator = Utilities.tabCode;
   private static final int terminator = Utilities.newLineCode;
@@ -67,54 +67,35 @@ public TextMetaDataFormatter(int prettyOutputNumCols) {
   /**
    * Write an error message.
   */
-  public void error(OutputStream out, String msg, int errorCode)
+  @Override
+  public void error(OutputStream out, String msg, int errorCode, String sqlState)
      throws HiveException
   {
-    try {
-      out.write(msg.getBytes("UTF-8"));
-      out.write(terminator);
-    } catch (Exception e) {
-      throw new HiveException(e);
-    }
-  }
-
-  /**
-   * Write a log warn message.
-   */
-  public void logWarn(OutputStream out, String msg, int errorCode)
-      throws HiveException
-  {
-    LOG.warn(msg);
+    error(out, msg, errorCode, sqlState, null);
   }
-  /**
-   * Write a log info message.
-   */
-  public void logInfo(OutputStream out, String msg, int errorCode)
-      throws HiveException
+  @Override
+  public void error(OutputStream out, String errorMessage, int errorCode, String sqlState, String errorDetail)
+      throws HiveException
   {
-    LOG.info(msg);
-  }
-
-  /**
-   * Write a console error message.
-   */
-  public void consoleError(LogHelper console, String msg, int errorCode) {
-    console.printError(msg);
-  }
-
-  /**
-   * Write a console error message.
-   */
-  public void consoleError(LogHelper console, String msg, String detail,
-      int errorCode)
-  {
-    console.printError(msg, detail);
+    try {
+      out.write(errorMessage.getBytes("UTF-8"));
+      if(errorDetail != null) {
+        out.write(errorDetail.getBytes("UTF-8"));
+      }
+      out.write(errorCode);
+      if(sqlState != null) {
+        out.write(sqlState.getBytes("UTF-8"));//this breaks all the tests in .q files
+      }
+      out.write(terminator);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
   }
-
   /**
    * Show a list of tables.
   */
+  @Override
   public void showTables(DataOutputStream out, Set<String> tables)
      throws HiveException
   {
@@ -131,6 +112,7 @@ public void showTables(DataOutputStream out, Set<String> tables)
     }
   }
+  @Override
   public void describeTable(DataOutputStream outStream,
      String colPath, String tableName,
      Table tbl, Partition part, List<FieldSchema> cols,
@@ -187,6 +169,7 @@ public void describeTable(DataOutputStream outStream,
     }
   }
+  @Override
   public void showTableStatus(DataOutputStream outStream,
      Hive db,
      HiveConf conf,
@@ -406,6 +389,7 @@ private void writeFileSystemStats(DataOutputStream outStream,
   /**
    * Show the table partitions.
   */
+  @Override
   public void showTablePartitons(DataOutputStream outStream, List<String> parts)
      throws HiveException
   {
@@ -430,6 +414,7 @@ public void showTablePartitons(DataOutputStream outStream, List<String> parts)
   /**
    * Show the list of databases
   */
+  @Override
   public void showDatabases(DataOutputStream outStream, List<String> databases)
      throws HiveException
   {
@@ -447,6 +432,7 @@ public void showDatabases(DataOutputStream outStream, List<String> databases)
   /**
    * Describe a database
   */
+  @Override
   public void showDatabaseDescription(DataOutputStream outStream,
      String database,
      String comment,
diff --git ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
index 9fc52fa..400abf3 100644
--- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
+++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -1007,7 +1007,6 @@ public int checkCliDriverResults(String tname) throws Exception {
       ".*LOCATION '.*",
       ".*transient_lastDdlTime.*",
       ".*last_modified_.*",
-      ".*java.lang.RuntimeException.*",
       ".*at org.*",
       ".*at sun.*",
       ".*at java.*",
diff --git ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out
index 67dca5a..ffb6d87 100644
--- ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out
+++ ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out
@@ -14,5 +14,4 @@ POSTHOOK: type: SHOWPARTITIONS
 PREHOOK: query: ALTER TABLE part_whitelist_test ADD PARTITION (ds='1,2,3,4')
 PREHOOK: type: ALTERTABLE_ADDPARTS
 PREHOOK: Input: default@part_whitelist_test
-FAILED: Error in metadata: MetaException(message:Partition value '1,2,3,4' contains a character not matched by whitelist pattern '[\\x20-\\x7E&&[^,]]*'. (configure with hive.metastore.partition.name.whitelist.pattern))
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Partition value '1,2,3,4' contains a character not matched by whitelist pattern '[\\x20-\\x7E&&[^,]]*'. (configure with hive.metastore.partition.name.whitelist.pattern))
diff --git ql/src/test/results/clientnegative/addpart1.q.out ql/src/test/results/clientnegative/addpart1.q.out
index f4da8f1..b4be19c 100644
--- ql/src/test/results/clientnegative/addpart1.q.out
+++ ql/src/test/results/clientnegative/addpart1.q.out
@@ -18,5 +18,4 @@ b=f/c=s
 PREHOOK: query: alter table addpart1 add partition (b='f', c='')
 PREHOOK: type: ALTERTABLE_ADDPARTS
 PREHOOK: Input: default@addpart1
-FAILED: Error in metadata: get partition: Value for key c is null or empty
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. get partition: Value for key c is null or empty
diff --git ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out
index 3117392..1cf067a 100644
--- ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out
+++ ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out
@@ -37,5 +37,4 @@ PREHOOK: query: drop table alter_part_nodrop_table
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@alter_part_nodrop_table
 PREHOOK: Output: default@alter_part_nodrop_table
-FAILED: Error in metadata: Table alter_part_nodrop_table Partitionyear=1996/month=10 is protected from being dropped
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Table alter_part_nodrop_table Partitionyear=1996/month=10 is protected from being dropped
diff --git ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out
index a094f24..aa1fbcd 100644
--- ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out
+++ ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out
@@ -22,5 +22,4 @@ PREHOOK: query: ALTER TABLE part_whitelist_test PARTITION (ds='1') rename to par
 PREHOOK: type: ALTERTABLE_RENAMEPART
 PREHOOK: Input: default@part_whitelist_test
 PREHOOK: Output: default@part_whitelist_test@ds=1
-FAILED: Error in metadata: Unable to rename partition.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to rename partition.
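
The mechanism behind all of these expected-output updates is the Task/TaskResult plumbing above: a failing task records its Throwable via Task.setException(), TaskRunner.runSequential() copies it into TaskResult, and the driver thread that polls the result can fold the message into the single FAILED line. Below is a minimal, runnable sketch of that handoff; the Result class mirrors TaskResult as patched here, while everything else (class names, the polling loop, the sample message) is an illustrative assumption rather than Hive code.

    // Illustrative stand-in for the Task/TaskResult/TaskRunner handoff; not Hive code.
    public class TaskHandoffSketch {
      // Mirrors the patched TaskResult: volatile so the polling thread sees the worker's writes.
      static class Result {
        private volatile int exitVal = -1;
        private volatile boolean running = true;
        private volatile Throwable taskError;
        void setExitVal(int exitVal, Throwable taskError) {
          this.exitVal = exitVal;
          this.taskError = taskError;
          this.running = false;
        }
        boolean isRunning() { return running; }
        int getExitVal() { return exitVal; }
        Throwable getTaskError() { return taskError; }
      }

      public static void main(String[] args) throws InterruptedException {
        Result result = new Result();
        Thread runner = new Thread(() -> {
          // Worker thread: report the code and the cause together, in the spirit of
          // TaskRunner.runSequential() calling result.setExitVal(exitVal, tsk.getException()).
          result.setExitVal(1, new RuntimeException("Table not_here does not exist"));
        });
        runner.start();
        while (result.isRunning()) {
          Thread.sleep(10); // driver-style polling loop
        }
        if (result.getExitVal() != 0) {
          // One-line failure message matching the shape of the updated .q.out files.
          System.out.println("FAILED: Execution Error, return code " + result.getExitVal()
              + " from SampleTask. " + result.getTaskError().getMessage());
        }
      }
    }
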
diff --git ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out
index da6401f..c26234c 100644
--- ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out
+++ ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out
@@ -26,5 +26,4 @@ POSTHOOK: Lineage: alter_rename_partition PARTITION(pcol1=old_part1:,pcol2=old_p
 PREHOOK: query: alter table alter_rename_partition partition (pCol1='nonexist_part1:', pcol2='nonexist_part2:') rename to partition (pCol1='new_part1:', pcol2='new_part2:')
 PREHOOK: type: ALTERTABLE_RENAMEPART
 PREHOOK: Input: default@alter_rename_partition
-Failed with exception null
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. null
diff --git ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out
index 1bda93e..2a5f921 100644
--- ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out
+++ ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out
@@ -27,5 +27,4 @@ PREHOOK: query: alter table alter_rename_partition partition (pCol1='old_part1:'
 PREHOOK: type: ALTERTABLE_RENAMEPART
 PREHOOK: Input: default@alter_rename_partition
 PREHOOK: Output: default@alter_rename_partition@pcol1=old_part1%3A/pcol2=old_part2%3A
-FAILED: Error in metadata: Unable to rename partition.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to rename partition.
diff --git ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out
index 72feb31..384fcbe 100644
--- ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out
+++ ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out
@@ -27,5 +27,4 @@ PREHOOK: query: alter table alter_rename_partition partition (pCol1='old_part1:'
 PREHOOK: type: ALTERTABLE_RENAMEPART
 PREHOOK: Input: default@alter_rename_partition
 PREHOOK: Output: default@alter_rename_partition@pcol1=old_part1%3A/pcol2=old_part2%3A
-FAILED: Error in metadata: Unable to rename partition.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to rename partition.
diff --git ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out
index e634648..f7d3b36 100644
--- ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out
+++ ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out
@@ -15,7 +15,6 @@ PREHOOK: query: alter table aa set serdeproperties ("input.regex" = "[^\\](.*)",
 PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
 PREHOOK: Input: default@aa
 PREHOOK: Output: default@aa
-Failed with exception Unclosed character class near index 7
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unclosed character class near index 7
 [^\](.*)
        ^
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
diff --git ql/src/test/results/clientnegative/alter_view_failure4.q.out ql/src/test/results/clientnegative/alter_view_failure4.q.out
index ab7f0ee..b218c19 100644
--- ql/src/test/results/clientnegative/alter_view_failure4.q.out
+++ ql/src/test/results/clientnegative/alter_view_failure4.q.out
@@ -18,5 +18,4 @@ PREHOOK: query: -- should fail: LOCATION clause is illegal
 PREHOOK: type: ALTERTABLE_ADDPARTS
 PREHOOK: Input: default@src
 PREHOOK: Input: default@xxx5
-FAILED: Error in metadata: LOCATION clause illegal for view partition
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. LOCATION clause illegal for view partition
diff --git ql/src/test/results/clientnegative/altern1.q.out ql/src/test/results/clientnegative/altern1.q.out
index c52ca04..73f8ad8 100644
--- ql/src/test/results/clientnegative/altern1.q.out
+++ ql/src/test/results/clientnegative/altern1.q.out
@@ -7,5 +7,4 @@ PREHOOK: query: alter table altern1 replace columns(a int, b int, ds string)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
 PREHOOK: Input: default@altern1
 PREHOOK: Output: default@altern1
-Invalid table columns : Partition column name ds conflicts with table columns.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Partition column name ds conflicts with table columns.
diff --git ql/src/test/results/clientnegative/archive1.q.out ql/src/test/results/clientnegative/archive1.q.out
index 03b309e..4daeaba 100644
--- ql/src/test/results/clientnegative/archive1.q.out
+++ ql/src/test/results/clientnegative/archive1.q.out
@@ -37,5 +37,4 @@ PREHOOK: query: ALTER TABLE srcpart_archived ARCHIVE PARTITION (ds='2008-04-08',
 PREHOOK: type: ALTERTABLE_ARCHIVE
 PREHOOK: Input: default@srcpart_archived
 PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
-FAILED: Error in metadata: Partition(s) already archived
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Partition(s) already archived
diff --git ql/src/test/results/clientnegative/archive2.q.out ql/src/test/results/clientnegative/archive2.q.out
index 6a942fd..21ba836 100644
--- ql/src/test/results/clientnegative/archive2.q.out
+++ ql/src/test/results/clientnegative/archive2.q.out
@@ -31,5 +31,4 @@ PREHOOK: query: ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08', hr=
 PREHOOK: type: ALTERTABLE_UNARCHIVE
 PREHOOK: Input: default@tstsrcpart
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
-FAILED: Error in metadata: Partition ds=2008-04-08/hr=12 is not archived.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Partition ds=2008-04-08/hr=12 is not archived.
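
What replaces the old free-form message strings on the Java side is the ErrorMsg-carrying HiveException constructor introduced earlier in this patch. The sketch below shows the shape of that pattern; DemoErrorMsg and DemoHiveException are cut-down stand-ins (Hive's real ErrorMsg also carries SQLSTATEs and many more entries, and its format() may differ), and only the HiveException(Throwable, ErrorMsg, String...) shape is taken from the patch. The 10001 code follows Hive's numbering for "Table not found"; treat the others as illustrative.

    import java.text.MessageFormat;

    // Simplified stand-in for org.apache.hadoop.hive.ql.ErrorMsg; real entries also carry SQLSTATE.
    enum DemoErrorMsg {
      GENERIC_ERROR(40000, "Exception while processing"),
      DATABASE_NOT_EXISTS(10100, "Database does not exist: {0}"),  // code illustrative
      INVALID_TABLE(10001, "Table not found {0}");

      private final int errorCode;
      private final String template;

      DemoErrorMsg(int errorCode, String template) {
        this.errorCode = errorCode;
        this.template = template;
      }
      int getErrorCode() { return errorCode; }
      String format(String... args) {
        // Parametrized templates keep the wording identical across all call sites.
        return MessageFormat.format(template, (Object[]) args);
      }
    }

    class DemoHiveException extends Exception {
      private final DemoErrorMsg canonicalErrorMsg;
      DemoHiveException(Throwable cause, DemoErrorMsg msg, String... args) {
        super(msg.format(args), cause);
        this.canonicalErrorMsg = msg;
      }
      DemoErrorMsg getCanonicalErrorMsg() { return canonicalErrorMsg; }
    }

    public class CanonicalErrorDemo {
      public static void main(String[] args) {
        try {
          throw new DemoHiveException(null, DemoErrorMsg.DATABASE_NOT_EXISTS, "does_not_exist");
        } catch (DemoHiveException e) {
          // Prints "Database does not exist: does_not_exist", matching the new .q.out lines.
          System.out.println(e.getMessage()
              + " (error code " + e.getCanonicalErrorMsg().getErrorCode() + ")");
        }
      }
    }
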
diff --git ql/src/test/results/clientnegative/archive_multi1.q.out ql/src/test/results/clientnegative/archive_multi1.q.out
index 7153965..52e2125 100644
--- ql/src/test/results/clientnegative/archive_multi1.q.out
+++ ql/src/test/results/clientnegative/archive_multi1.q.out
@@ -58,5 +58,4 @@ PREHOOK: type: ALTERTABLE_ARCHIVE
 PREHOOK: Input: default@tstsrcpart
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
-FAILED: Error in metadata: Partition(s) already archived
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Partition(s) already archived
diff --git ql/src/test/results/clientnegative/archive_multi2.q.out ql/src/test/results/clientnegative/archive_multi2.q.out
index 5362df3..163d978 100644
--- ql/src/test/results/clientnegative/archive_multi2.q.out
+++ ql/src/test/results/clientnegative/archive_multi2.q.out
@@ -47,5 +47,4 @@ PREHOOK: query: ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08', hr=
 PREHOOK: type: ALTERTABLE_UNARCHIVE
 PREHOOK: Input: default@tstsrcpart
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
-FAILED: Error in metadata: Partition ds=2008-04-08/hr=12 is not archived.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Partition ds=2008-04-08/hr=12 is not archived.
diff --git ql/src/test/results/clientnegative/archive_multi3.q.out ql/src/test/results/clientnegative/archive_multi3.q.out
index af6c3f9..6e21a76 100644
--- ql/src/test/results/clientnegative/archive_multi3.q.out
+++ ql/src/test/results/clientnegative/archive_multi3.q.out
@@ -56,5 +56,4 @@ PREHOOK: type: ALTERTABLE_ARCHIVE
 PREHOOK: Input: default@tstsrcpart
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
-FAILED: Error in metadata: Conflict with existing archive ds=2008-04-08/hr=12
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Conflict with existing archive ds=2008-04-08/hr=12
diff --git ql/src/test/results/clientnegative/archive_multi4.q.out ql/src/test/results/clientnegative/archive_multi4.q.out
index 9a044fe..2d41343 100644
--- ql/src/test/results/clientnegative/archive_multi4.q.out
+++ ql/src/test/results/clientnegative/archive_multi4.q.out
@@ -57,5 +57,4 @@ PREHOOK: query: ALTER TABLE tstsrcpart ARCHIVE PARTITION (ds='2008-04-08', hr='1
 PREHOOK: type: ALTERTABLE_ARCHIVE
 PREHOOK: Input: default@tstsrcpart
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
-FAILED: Error in metadata: Conflict with existing archive ds=2008-04-08
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Conflict with existing archive ds=2008-04-08
diff --git ql/src/test/results/clientnegative/archive_multi5.q.out ql/src/test/results/clientnegative/archive_multi5.q.out
index 8832ed2..0f9a6e1 100644
--- ql/src/test/results/clientnegative/archive_multi5.q.out
+++ ql/src/test/results/clientnegative/archive_multi5.q.out
@@ -56,5 +56,4 @@ PREHOOK: type: ALTERTABLE_UNARCHIVE
 PREHOOK: Input: default@tstsrcpart
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
-FAILED: Error in metadata: Partition ds=2008-04-08/hr=11 is not archived.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Partition ds=2008-04-08/hr=11 is not archived.
diff --git ql/src/test/results/clientnegative/archive_multi6.q.out ql/src/test/results/clientnegative/archive_multi6.q.out
index b6606e6..cba6d3e 100644
--- ql/src/test/results/clientnegative/archive_multi6.q.out
+++ ql/src/test/results/clientnegative/archive_multi6.q.out
@@ -57,5 +57,4 @@ PREHOOK: query: ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08', hr=
 PREHOOK: type: ALTERTABLE_UNARCHIVE
 PREHOOK: Input: default@tstsrcpart
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
-FAILED: Error in metadata: Partition ds=2008-04-08/hr=12 is archived at level 1, and given partspec only has 2 specs.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Partition ds=2008-04-08/hr=12 is archived at level 1, and given partspec only has 2 specs.
diff --git ql/src/test/results/clientnegative/archive_multi7.q.out ql/src/test/results/clientnegative/archive_multi7.q.out
index 9e27929..74c0b9c 100644
--- ql/src/test/results/clientnegative/archive_multi7.q.out
+++ ql/src/test/results/clientnegative/archive_multi7.q.out
@@ -40,4 +40,3 @@ PREHOOK: Input: default@tstsrcpart
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
diff --git ql/src/test/results/clientnegative/authorization_fail_1.q.out ql/src/test/results/clientnegative/authorization_fail_1.q.out
index 020916f..ab1abe2 100644
--- ql/src/test/results/clientnegative/authorization_fail_1.q.out
+++ ql/src/test/results/clientnegative/authorization_fail_1.q.out
@@ -12,5 +12,5 @@ POSTHOOK: Output: default@authorization_fail_1
 PREHOOK: query: grant Create on table authorization_fail_1 to user hive_test_user
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@authorization_fail_1
-#### A masked pattern was here ####
+Error: java.lang.RuntimeException: InvalidObjectException(message:Create is already granted on table [default,authorization_fail_1] by hive_test_user)
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
diff --git ql/src/test/results/clientnegative/column_rename1.q.out ql/src/test/results/clientnegative/column_rename1.q.out
index 52961e1..f4c101c 100644
--- ql/src/test/results/clientnegative/column_rename1.q.out
+++ ql/src/test/results/clientnegative/column_rename1.q.out
@@ -23,5 +23,4 @@ PREHOOK: query: alter table tstsrc change src_not_exist key_value string
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@tstsrc
 PREHOOK: Output: default@tstsrc
-Column 'src_not_exist' does not exists
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Invalid column reference src_not_exist
diff --git ql/src/test/results/clientnegative/column_rename2.q.out ql/src/test/results/clientnegative/column_rename2.q.out
index 0ca78f9..ff08bb4 100644
--- ql/src/test/results/clientnegative/column_rename2.q.out
+++ ql/src/test/results/clientnegative/column_rename2.q.out
@@ -23,5 +23,4 @@ PREHOOK: query: alter table tstsrc change key value string
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@tstsrc
 PREHOOK: Output: default@tstsrc
-Column 'value' exists
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Duplicate column name: value
diff --git ql/src/test/results/clientnegative/column_rename4.q.out ql/src/test/results/clientnegative/column_rename4.q.out
index e776034..e4722f5 100644
--- ql/src/test/results/clientnegative/column_rename4.q.out
+++ ql/src/test/results/clientnegative/column_rename4.q.out
@@ -23,5 +23,4 @@ PREHOOK: query: alter table tstsrc change key key2 string after key_value
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@tstsrc
 PREHOOK: Output: default@tstsrc
-Column 'key_value' does not exists
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Invalid column reference key_value
diff --git ql/src/test/results/clientnegative/create_table_failure3.q.out ql/src/test/results/clientnegative/create_table_failure3.q.out
index 2df2503..461891a 100644
--- ql/src/test/results/clientnegative/create_table_failure3.q.out
+++ ql/src/test/results/clientnegative/create_table_failure3.q.out
@@ -1,4 +1,3 @@
 PREHOOK: query: create table table_in_database_creation_not_exist.test (a string)
 PREHOOK: type: CREATETABLE
-FAILED: Error in metadata: InvalidObjectException(message:There is no database named table_in_database_creation_not_exist)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:There is no database named table_in_database_creation_not_exist)
diff --git ql/src/test/results/clientnegative/create_table_failure4.q.out ql/src/test/results/clientnegative/create_table_failure4.q.out
index d65196a..888c876 100644
--- ql/src/test/results/clientnegative/create_table_failure4.q.out
+++ ql/src/test/results/clientnegative/create_table_failure4.q.out
@@ -1,4 +1,3 @@
 PREHOOK: query: create table `table_in_database_creation_not_exist.test` (a string)
 PREHOOK: type: CREATETABLE
-FAILED: Error in metadata: InvalidObjectException(message:There is no database named table_in_database_creation_not_exist)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:There is no database named table_in_database_creation_not_exist)
diff --git ql/src/test/results/clientnegative/create_table_wrong_regex.q.out ql/src/test/results/clientnegative/create_table_wrong_regex.q.out
index be7ca05..97e8520 100644
--- ql/src/test/results/clientnegative/create_table_wrong_regex.q.out
+++ ql/src/test/results/clientnegative/create_table_wrong_regex.q.out
@@ -6,7 +6,6 @@ PREHOOK: query: create table aa ( test STRING )
 ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'
 WITH SERDEPROPERTIES ("input.regex" = "[^\\](.*)", "output.format.string" = "$1s")
 PREHOOK: type: CREATETABLE
-FAILED: Error in metadata: java.util.regex.PatternSyntaxException: Unclosed character class near index 7
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. java.util.regex.PatternSyntaxException: Unclosed character class near index 7
 [^\](.*)
        ^
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
diff --git ql/src/test/results/clientnegative/create_view_failure1.q.out ql/src/test/results/clientnegative/create_view_failure1.q.out
index 27d4604..75d83a5 100644
--- ql/src/test/results/clientnegative/create_view_failure1.q.out
+++ ql/src/test/results/clientnegative/create_view_failure1.q.out
@@ -11,5 +11,4 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@xxx12
 PREHOOK: query: CREATE VIEW xxx12 AS SELECT key FROM src
 PREHOOK: type: CREATEVIEW
-FAILED: Error in metadata: AlreadyExistsException(message:Table xxx12 already exists)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. AlreadyExistsException(message:Table xxx12 already exists)
diff --git ql/src/test/results/clientnegative/create_view_failure2.q.out ql/src/test/results/clientnegative/create_view_failure2.q.out
index fcae7f8..e7bde22 100644
--- ql/src/test/results/clientnegative/create_view_failure2.q.out
+++ ql/src/test/results/clientnegative/create_view_failure2.q.out
@@ -11,5 +11,4 @@ POSTHOOK: type: CREATEVIEW
 POSTHOOK: Output: default@xxx4
 PREHOOK: query: CREATE TABLE xxx4(key int)
 PREHOOK: type: CREATETABLE
-FAILED: Error in metadata: AlreadyExistsException(message:Table xxx4 already exists)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. AlreadyExistsException(message:Table xxx4 already exists)
diff --git ql/src/test/results/clientnegative/create_view_failure4.q.out ql/src/test/results/clientnegative/create_view_failure4.q.out
index 0c6a64c..9b0fa96 100644
--- ql/src/test/results/clientnegative/create_view_failure4.q.out
+++ ql/src/test/results/clientnegative/create_view_failure4.q.out
@@ -6,5 +6,4 @@
 PREHOOK: query: -- duplicate column names are illegal
 CREATE VIEW xxx5(x,x) AS SELECT key,value FROM src
 PREHOOK: type: CREATEVIEW
-FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Duplicate column name x in the table definition.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Duplicate column name x in the table definition.
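
With logWarn/logInfo/consoleError gone, both formatters now funnel failures through a single pair of error() overloads, where a null sqlState or errorDetail means "leave that field out". A hand-rolled sketch of the JSON variant's skip-if-null behavior follows; Hive itself builds the map with MapBuilder and serializes it with a JSON object mapper, so the writer below is a simplified stand-in that also skips proper string escaping.

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.LinkedHashMap;
    import java.util.Map;

    // Simplified stand-in for JsonMetaDataFormatter.error(); not Hive code.
    public class JsonErrorSketch {
      static void error(OutputStream out, String msg, int errorCode,
                        String sqlState, String errorDetail) throws IOException {
        Map<String, Object> m = new LinkedHashMap<>();
        m.put("error", msg);
        if (errorDetail != null) {      // null means "omit the field entirely"
          m.put("errorDetail", errorDetail);
        }
        m.put("errorCode", errorCode);
        if (sqlState != null) {
          m.put("sqlState", sqlState);
        }
        // Naive JSON rendering; values are assumed not to need escaping.
        StringBuilder sb = new StringBuilder("{");
        boolean first = true;
        for (Map.Entry<String, Object> e : m.entrySet()) {
          if (!first) sb.append(',');
          first = false;
          sb.append('"').append(e.getKey()).append("\":");
          Object v = e.getValue();
          sb.append(v instanceof Number ? v.toString() : "\"" + v + "\"");
        }
        sb.append('}');
        out.write(sb.toString().getBytes(StandardCharsets.UTF_8));
      }

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        error(out, "Table not found xyz", 10001, null, null);
        // Prints {"error":"Table not found xyz","errorCode":10001}
        System.out.println(out.toString("UTF-8"));
      }
    }
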
diff --git ql/src/test/results/clientnegative/database_create_already_exists.q.out ql/src/test/results/clientnegative/database_create_already_exists.q.out
index 8fe3cc8..86aee91 100644
--- ql/src/test/results/clientnegative/database_create_already_exists.q.out
+++ ql/src/test/results/clientnegative/database_create_already_exists.q.out
@@ -11,5 +11,4 @@ CREATE DATABASE test_db
 POSTHOOK: type: CREATEDATABASE
 PREHOOK: query: CREATE DATABASE test_db
 PREHOOK: type: CREATEDATABASE
-Database test_db already exists
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Database test_db already exists
diff --git ql/src/test/results/clientnegative/database_create_invalid_name.q.out ql/src/test/results/clientnegative/database_create_invalid_name.q.out
index 1e58089..8d42949 100644
--- ql/src/test/results/clientnegative/database_create_invalid_name.q.out
+++ ql/src/test/results/clientnegative/database_create_invalid_name.q.out
@@ -6,5 +6,4 @@ default
 PREHOOK: query: -- Try to create a database with an invalid name
 CREATE DATABASE `test.db`
 PREHOOK: type: CREATEDATABASE
-FAILED: Error in metadata: InvalidObjectException(message:test.db is not a valid database name)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:test.db is not a valid database name)
diff --git ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out
index 56335b4..6cb2e95 100644
--- ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out
+++ ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out
@@ -6,5 +6,4 @@ default
 PREHOOK: query: -- Try to drop a database that does not exist
 DROP DATABASE does_not_exist
 PREHOOK: type: DROPDATABASE
-There is no database named does_not_exist
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Database does not exist: does_not_exist
diff --git ql/src/test/results/clientnegative/database_drop_not_empty.q.out ql/src/test/results/clientnegative/database_drop_not_empty.q.out
index baa8f37..ce7b6b2 100644
--- ql/src/test/results/clientnegative/database_drop_not_empty.q.out
+++ ql/src/test/results/clientnegative/database_drop_not_empty.q.out
@@ -24,5 +24,4 @@ POSTHOOK: query: USE default
 POSTHOOK: type: SWITCHDATABASE
 PREHOOK: query: DROP DATABASE test_db
 PREHOOK: type: DROPDATABASE
-FAILED: Error in metadata: InvalidOperationException(message:Database test_db is not empty)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Database test_db is not empty)
diff --git ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out
index b297a99..492590a 100644
--- ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out
+++ ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out
@@ -24,5 +24,4 @@ POSTHOOK: query: USE default
 POSTHOOK: type: SWITCHDATABASE
 PREHOOK: query: DROP DATABASE db_drop_non_empty_restrict
 PREHOOK: type: DROPDATABASE
-FAILED: Error in metadata: InvalidOperationException(message:Database db_drop_non_empty_restrict is not empty)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Database db_drop_non_empty_restrict is not empty)
diff --git ql/src/test/results/clientnegative/database_switch_does_not_exist.q.out ql/src/test/results/clientnegative/database_switch_does_not_exist.q.out
index 8b5674d..1d640e3 100644
--- ql/src/test/results/clientnegative/database_switch_does_not_exist.q.out
+++ ql/src/test/results/clientnegative/database_switch_does_not_exist.q.out
@@ -6,5 +6,4 @@ default
 PREHOOK: query: -- Try to switch to a database that does not exist
 USE does_not_exist
 PREHOOK: type: SWITCHDATABASE
-FAILED: Error in metadata: ERROR: The database does_not_exist does not exist.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Database does not exist: does_not_exist
diff --git ql/src/test/results/clientnegative/deletejar.q.out ql/src/test/results/clientnegative/deletejar.q.out
index 4944fd9..b873e34 100644
--- ql/src/test/results/clientnegative/deletejar.q.out
+++ ql/src/test/results/clientnegative/deletejar.q.out
@@ -1,4 +1,3 @@
 PREHOOK: query: CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
-FAILED: Error in metadata: Cannot validate serde: org.apache.hadoop.hive.serde2.TestSerDe
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Cannot validate serde: org.apache.hadoop.hive.serde2.TestSerDe
diff --git ql/src/test/results/clientnegative/describe_xpath1.q.out ql/src/test/results/clientnegative/describe_xpath1.q.out
index 3996e29..6a19140 100644
--- ql/src/test/results/clientnegative/describe_xpath1.q.out
+++ ql/src/test/results/clientnegative/describe_xpath1.q.out
@@ -1,4 +1,3 @@
 PREHOOK: query: describe src_thrift.$elem$
 PREHOOK: type: DESCTABLE
-#### A masked pattern was here ####
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. cannot find field $elem$ from [private int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, private java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString]
diff --git ql/src/test/results/clientnegative/describe_xpath2.q.out ql/src/test/results/clientnegative/describe_xpath2.q.out
index aac33a8..92d67b8 100644
--- ql/src/test/results/clientnegative/describe_xpath2.q.out
+++ ql/src/test/results/clientnegative/describe_xpath2.q.out
@@ -1,4 +1,3 @@
 PREHOOK: query: describe src_thrift.$key$
 PREHOOK: type: DESCTABLE
-#### A masked pattern was here ####
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. cannot find field $key$ from [private int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, private java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString]
diff --git ql/src/test/results/clientnegative/describe_xpath3.q.out ql/src/test/results/clientnegative/describe_xpath3.q.out
index 2b66907..9612971 100644
--- ql/src/test/results/clientnegative/describe_xpath3.q.out
+++ ql/src/test/results/clientnegative/describe_xpath3.q.out
@@ -1,4 +1,3 @@
 PREHOOK: query: describe src_thrift.lint.abc
 PREHOOK: type: DESCTABLE
-FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Error in getting fields from serde.Unknown type for abc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error in getting fields from serde.Unknown type for abc
diff --git ql/src/test/results/clientnegative/describe_xpath4.q.out ql/src/test/results/clientnegative/describe_xpath4.q.out
index 4297bcf..6650cb8 100644
--- ql/src/test/results/clientnegative/describe_xpath4.q.out
+++ ql/src/test/results/clientnegative/describe_xpath4.q.out
@@ -1,4 +1,3 @@
 PREHOOK: query: describe src_thrift.mStringString.abc
 PREHOOK: type: DESCTABLE
-FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Error in getting fields from serde.Unknown type for abc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error in getting fields from serde.Unknown type for abc
diff --git ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out
index f44ace4..6b21dee 100644
--- ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out
+++ ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out
@@ -122,4 +122,4 @@ ALTER TABLE test_table123 REPLACE COLUMNS (a INT, b STRING)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
 PREHOOK: Input: default@test_table123
 PREHOOK: Output: default@test_table123
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table.
diff --git ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out
index 5af55b8..5724f68 100644
--- ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out
+++ ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out
@@ -39,4 +39,4 @@ PREHOOK: query: ALTER TABLE test_table123 CHANGE COLUMN b b MAP
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@test_table123
 PREHOOK: Output: default@test_table123
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table.
diff --git ql/src/test/results/clientnegative/drop_table_failure2.q.out ql/src/test/results/clientnegative/drop_table_failure2.q.out
index 809f16f..7681716 100644
--- ql/src/test/results/clientnegative/drop_table_failure2.q.out
+++ ql/src/test/results/clientnegative/drop_table_failure2.q.out
@@ -8,5 +8,4 @@ DROP TABLE xxx6
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@xxx6
 PREHOOK: Output: default@xxx6
-FAILED: Error in metadata: Cannot drop a view with DROP TABLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Cannot drop a view with DROP TABLE
diff --git ql/src/test/results/clientnegative/drop_table_failure3.q.out ql/src/test/results/clientnegative/drop_table_failure3.q.out
index 655bf41..8435822 100644
--- ql/src/test/results/clientnegative/drop_table_failure3.q.out
+++ ql/src/test/results/clientnegative/drop_table_failure3.q.out
@@ -45,5 +45,4 @@ PREHOOK: query: drop table drop_table_failure_temp
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: dtf3@drop_table_failure_temp
 PREHOOK: Output: dtf3@drop_table_failure_temp
-FAILED: Error in metadata: Table drop_table_failure_temp Partitionp=p3 is protected from being dropped
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Table drop_table_failure_temp Partitionp=p3 is protected from being dropped
diff --git ql/src/test/results/clientnegative/drop_view_failure1.q.out ql/src/test/results/clientnegative/drop_view_failure1.q.out
index 61ec927..e98133d 100644
--- ql/src/test/results/clientnegative/drop_view_failure1.q.out
+++ ql/src/test/results/clientnegative/drop_view_failure1.q.out
@@ -8,5 +8,4 @@ DROP VIEW xxx1
 PREHOOK: type: DROPVIEW
 PREHOOK: Input: default@xxx1
 PREHOOK: Output: default@xxx1
-FAILED: Error in metadata: Cannot drop a base table with DROP VIEW
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Cannot drop a base table with DROP VIEW
diff --git ql/src/test/results/clientnegative/external1.q.out ql/src/test/results/clientnegative/external1.q.out
index 4f04777..55cd21b 100644
--- ql/src/test/results/clientnegative/external1.q.out
+++ ql/src/test/results/clientnegative/external1.q.out
@@ -1,4 +1,3 @@
 #### A masked pattern was here ####
 PREHOOK: type: CREATETABLE
-FAILED: Error in metadata: MetaException(message:Got exception: java.io.IOException No FileSystem for scheme: invalidscheme)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Got exception: java.io.IOException No FileSystem for scheme: invalidscheme)
diff --git ql/src/test/results/clientnegative/external2.q.out ql/src/test/results/clientnegative/external2.q.out
index 49652ff..cf84962 100644
--- ql/src/test/results/clientnegative/external2.q.out
+++ ql/src/test/results/clientnegative/external2.q.out
@@ -6,5 +6,4 @@ POSTHOOK: Output: default@external2
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
 PREHOOK: Input: default@external2
-FAILED: Error in metadata: MetaException(message:Got exception: java.io.IOException No FileSystem for scheme: invalidscheme)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Got exception: java.io.IOException No FileSystem for scheme: invalidscheme)
diff --git ql/src/test/results/clientnegative/invalid_columns.q.out ql/src/test/results/clientnegative/invalid_columns.q.out
index ccc17d9..3311b0a 100644
--- ql/src/test/results/clientnegative/invalid_columns.q.out
+++ ql/src/test/results/clientnegative/invalid_columns.q.out
@@ -2,5 +2,4 @@ PREHOOK: query: CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERD
 STORED AS TEXTFILE
 TBLPROPERTIES('columns'='valid_colname,invalid.colname')
 PREHOOK: type: CREATETABLE
-FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Invalid column name 'invalid.colname' in the table definition
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask.
org.apache.hadoop.hive.ql.metadata.HiveException: Invalid column name 'invalid.colname' in the table definition diff --git ql/src/test/results/clientnegative/lockneg2.q.out ql/src/test/results/clientnegative/lockneg2.q.out index ea34746..44c665c 100644 --- ql/src/test/results/clientnegative/lockneg2.q.out +++ ql/src/test/results/clientnegative/lockneg2.q.out @@ -19,5 +19,4 @@ POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: UNLOCK TABLE tstsrc PREHOOK: type: UNLOCKTABLE -FAILED: Error in metadata: Table tstsrc is not locked -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Table tstsrc is not locked diff --git ql/src/test/results/clientnegative/lockneg3.q.out ql/src/test/results/clientnegative/lockneg3.q.out index 0664348..ac85216 100644 --- ql/src/test/results/clientnegative/lockneg3.q.out +++ ql/src/test/results/clientnegative/lockneg3.q.out @@ -23,5 +23,4 @@ POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpar POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: UNLOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11') PREHOOK: type: UNLOCKTABLE -FAILED: Error in metadata: Table tstsrcpart is not locked -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Table tstsrcpart is not locked diff --git ql/src/test/results/clientnegative/lockneg4.q.out ql/src/test/results/clientnegative/lockneg4.q.out index bd5ff83..8d1a893 100644 --- ql/src/test/results/clientnegative/lockneg4.q.out +++ ql/src/test/results/clientnegative/lockneg4.q.out @@ -29,5 +29,4 @@ POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpar POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: SHOW LOCKS tstsrcpart PARTITION(ds='2008-04-08', hr='12') PREHOOK: type: SHOWLOCKS -FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Partition {ds=2008-04-08, hr=12} for table tstsrcpart does not exist -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Partition {ds=2008-04-08, hr=12} for table tstsrcpart does not exist diff --git ql/src/test/results/clientnegative/lockneg5.q.out ql/src/test/results/clientnegative/lockneg5.q.out index 083ef32..674fad0 100644 --- ql/src/test/results/clientnegative/lockneg5.q.out +++ ql/src/test/results/clientnegative/lockneg5.q.out @@ -4,5 +4,4 @@ POSTHOOK: query: drop table tstsrcpart POSTHOOK: type: DROPTABLE PREHOOK: query: show locks tstsrcpart extended PREHOOK: type: SHOWLOCKS -FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.InvalidTableException: Table tstsrcpart not found -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
org.apache.hadoop.hive.ql.metadata.InvalidTableException: Table not found tstsrcpart diff --git ql/src/test/results/clientnegative/protectmode_tbl6.q.out ql/src/test/results/clientnegative/protectmode_tbl6.q.out index 08a66ed..0bbfcbf 100644 --- ql/src/test/results/clientnegative/protectmode_tbl6.q.out +++ ql/src/test/results/clientnegative/protectmode_tbl6.q.out @@ -23,5 +23,4 @@ PREHOOK: query: drop table tbl_protectmode_tbl6 PREHOOK: type: DROPTABLE PREHOOK: Input: default@tbl_protectmode_tbl6 PREHOOK: Output: default@tbl_protectmode_tbl6 -FAILED: Error in metadata: Table tbl_protectmode_tbl6 is protected from being dropped -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Table tbl_protectmode_tbl6 is protected from being dropped diff --git ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out index f41b12b..590781c 100644 --- ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out +++ ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out @@ -38,5 +38,4 @@ PREHOOK: query: drop table tbl_protectmode__no_drop PREHOOK: type: DROPTABLE PREHOOK: Input: default@tbl_protectmode__no_drop PREHOOK: Output: default@tbl_protectmode__no_drop -FAILED: Error in metadata: Table tbl_protectmode__no_drop is protected from being dropped -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Table tbl_protectmode__no_drop is protected from being dropped diff --git ql/src/test/results/clientnegative/serde_regex.q.out ql/src/test/results/clientnegative/serde_regex.q.out index 1c5b5fa..7d09ad2 100644 --- ql/src/test/results/clientnegative/serde_regex.q.out +++ ql/src/test/results/clientnegative/serde_regex.q.out @@ -19,5 +19,4 @@ WITH SERDEPROPERTIES ( "input.regex" = "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") (-|[0-9]*) (-|[0-9]*)(?: ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\"))?") STORED AS TEXTFILE PREHOOK: type: CREATETABLE -#### A masked pattern was here #### -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException org.apache.hadoop.hive.serde2.RegexSerDe doesn't allow column [3] named time with type timestamp) diff --git ql/src/test/results/clientnegative/serde_regex3.q.out ql/src/test/results/clientnegative/serde_regex3.q.out index 88ae604..b959238 100644 --- ql/src/test/results/clientnegative/serde_regex3.q.out +++ ql/src/test/results/clientnegative/serde_regex3.q.out @@ -16,5 +16,4 @@ PREHOOK: query: -- null input.regex, raise an exception ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.RegexSerDe' STORED AS TEXTFILE PREHOOK: type: CREATETABLE -#### A masked pattern was here #### -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException This table does not have serde property "input.regex"!) 
diff --git ql/src/test/results/clientnegative/touch1.q.out ql/src/test/results/clientnegative/touch1.q.out index 3f3c5e7..cc1e52e 100644 --- ql/src/test/results/clientnegative/touch1.q.out +++ ql/src/test/results/clientnegative/touch1.q.out @@ -1,5 +1,4 @@ PREHOOK: query: ALTER TABLE srcpart TOUCH PARTITION (ds='2008-04-08', hr='13') PREHOOK: type: ALTERTABLE_TOUCH PREHOOK: Input: default@srcpart -FAILED: Error in metadata: Specified partition does not exist -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Specified partition does not exist diff --git ql/src/test/results/clientnegative/touch2.q.out ql/src/test/results/clientnegative/touch2.q.out index b0b8826..045121a 100644 --- ql/src/test/results/clientnegative/touch2.q.out +++ ql/src/test/results/clientnegative/touch2.q.out @@ -1,5 +1,4 @@ PREHOOK: query: ALTER TABLE src TOUCH PARTITION (ds='2008-04-08', hr='12') PREHOOK: type: ALTERTABLE_TOUCH PREHOOK: Input: default@src -FAILED: Error in metadata: table is not partitioned but partition spec exists: {ds=2008-04-08, hr=12} -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. table is not partitioned but partition spec exists: {ds=2008-04-08, hr=12}