diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index ef39573..921e277 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -135,6 +135,7 @@ public HiveConnection(String uri, Properties info) throws SQLException { supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V2); supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3); supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V4); + supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V5); // open client session openSession(); diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java index fce19bf..aa6654d 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java @@ -231,6 +231,7 @@ public boolean execute(String sql) throws SQLException { case UKNOWN_STATE: throw new SQLException("Unknown query", "HY000"); case INITIALIZED_STATE: + case PENDING_STATE: case RUNNING_STATE: break; } diff --git a/service/if/TCLIService.thrift b/service/if/TCLIService.thrift index 1f49445..415f2e0 100644 --- a/service/if/TCLIService.thrift +++ b/service/if/TCLIService.thrift @@ -48,6 +48,9 @@ enum TProtocolVersion { // V4 add support for decimial datatype HIVE_CLI_SERVICE_PROTOCOL_V4 + + // V5 adds error details when GetOperationStatus returns in error state + HIVE_CLI_SERVICE_PROTOCOL_V5 } enum TTypeId { @@ -386,12 +389,11 @@ enum TOperationState { // The operation is in an unrecognized state UKNOWN_STATE, - + // The operation is in an pending state PENDING_STATE, } - // A string identifier. This is interpreted literally. 
typedef string TIdentifier @@ -482,11 +484,11 @@ struct TOperationHandle { // OpenSession() // // Open a session (connection) on the server against -// which operations may be executed. +// which operations may be executed. struct TOpenSessionReq { // The version of the HiveServer2 protocol that the client is using. - 1: required TProtocolVersion client_protocol = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3 - + 1: required TProtocolVersion client_protocol = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V5 + // Username and password for authentication. // Depending on the authentication scheme being used, // this information may instead be provided by a lower @@ -504,7 +506,7 @@ struct TOpenSessionResp { 1: required TStatus status // The protocol version that the server is using. - 2: required TProtocolVersion serverProtocolVersion = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V4 + 2: required TProtocolVersion serverProtocolVersion = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V5 // Session Handle 3: optional TSessionHandle sessionHandle @@ -901,6 +903,16 @@ struct TGetOperationStatusReq { struct TGetOperationStatusResp { 1: required TStatus status 2: optional TOperationState operationState + + // If operationState is ERROR_STATE, then the following fields may be set + // sqlState as defined in the ISO/IEC CLI specification + 3: optional string sqlState + + // Internal error code + 4: optional i32 errorCode + + // Error message + 5: optional string errorMessage } diff --git a/service/src/java/org/apache/hive/service/cli/CLIService.java b/service/src/java/org/apache/hive/service/cli/CLIService.java index 8c85386..e4f9d24 100644 --- a/service/src/java/org/apache/hive/service/cli/CLIService.java +++ b/service/src/java/org/apache/hive/service/cli/CLIService.java @@ -46,327 +46,327 @@ */ public class CLIService extends CompositeService implements ICLIService { - private final Log LOG = LogFactory.getLog(CLIService.class.getName()); - - private HiveConf hiveConf; - private 
SessionManager sessionManager; - private IMetaStoreClient metastoreClient; - private String serverUserName = null; - - - public CLIService() { - super("CLIService"); - } - - @Override - public synchronized void init(HiveConf hiveConf) { - this.hiveConf = hiveConf; - - sessionManager = new SessionManager(); - addService(sessionManager); - try { - HiveAuthFactory.loginFromKeytab(hiveConf); - serverUserName = ShimLoader.getHadoopShims(). - getShortUserName(ShimLoader.getHadoopShims().getUGIForConf(hiveConf)); - } catch (IOException e) { - throw new ServiceException("Unable to login to kerberos with given principal/keytab", e); - } catch (LoginException e) { - throw new ServiceException("Unable to login to kerberos with given principal/keytab", e); - } - super.init(hiveConf); - } - - @Override - public synchronized void start() { - super.start(); - - try { - // make sure that the base scratch directories exists and writable - setupStagingDir(hiveConf.getVar(HiveConf.ConfVars.SCRATCHDIR), false); - setupStagingDir(hiveConf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR), true); - setupStagingDir(hiveConf.getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR), true); - } catch (IOException eIO) { - throw new ServiceException("Error setting stage directories", eIO); - } - - try { - // Initialize and test a connection to the metastore - metastoreClient = new HiveMetaStoreClient(hiveConf); - metastoreClient.getDatabases("default"); - } catch (Exception e) { - throw new ServiceException("Unable to connect to MetaStore!", e); - } - } - - @Override - public synchronized void stop() { - if (metastoreClient != null) { - metastoreClient.close(); - } - super.stop(); - } - - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map) - */ - @Override - public SessionHandle openSession(String username, String password, Map configuration) - throws HiveSQLException { - SessionHandle sessionHandle = 
sessionManager.openSession(username, password, configuration, false, null); - LOG.info(sessionHandle + ": openSession()"); - return sessionHandle; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map) - */ - @Override - public SessionHandle openSessionWithImpersonation(String username, String password, Map configuration, - String delegationToken) throws HiveSQLException { - SessionHandle sessionHandle = sessionManager.openSession(username, password, configuration, - true, delegationToken); - LOG.info(sessionHandle + ": openSession()"); - return sessionHandle; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public void closeSession(SessionHandle sessionHandle) - throws HiveSQLException { - sessionManager.closeSession(sessionHandle); - LOG.info(sessionHandle + ": closeSession()"); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getInfo(org.apache.hive.service.cli.SessionHandle, java.util.List) - */ - @Override - public GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType getInfoType) - throws HiveSQLException { - GetInfoValue infoValue = sessionManager.getSession(sessionHandle).getInfo(getInfoType); - LOG.info(sessionHandle + ": getInfo()"); - return infoValue; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#executeStatement(org.apache.hive.service.cli.SessionHandle, - * java.lang.String, java.util.Map) - */ - @Override - public OperationHandle executeStatement(SessionHandle sessionHandle, String statement, - Map confOverlay) - throws HiveSQLException { - OperationHandle opHandle = sessionManager.getSession(sessionHandle) - .executeStatement(statement, confOverlay); - LOG.info(sessionHandle + ": executeStatement()"); - return opHandle; - } - - /* (non-Javadoc) - * @see 
org.apache.hive.service.cli.ICLIService#executeStatementAsync(org.apache.hive.service.cli.SessionHandle, - * java.lang.String, java.util.Map) - */ - @Override - public OperationHandle executeStatementAsync(SessionHandle sessionHandle, String statement, - Map confOverlay) throws HiveSQLException { - OperationHandle opHandle = sessionManager.getSession(sessionHandle) - .executeStatementAsync(statement, confOverlay); - LOG.info(sessionHandle + ": executeStatementAsync()"); - return opHandle; - } - - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getTypeInfo(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getTypeInfo(SessionHandle sessionHandle) - throws HiveSQLException { - OperationHandle opHandle = sessionManager.getSession(sessionHandle).getTypeInfo(); - LOG.info(sessionHandle + ": getTypeInfo()"); - return opHandle; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getCatalogs(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getCatalogs(SessionHandle sessionHandle) - throws HiveSQLException { - OperationHandle opHandle = sessionManager.getSession(sessionHandle).getCatalogs(); - LOG.info(sessionHandle + ": getCatalogs()"); - return opHandle; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String) - */ - @Override - public OperationHandle getSchemas(SessionHandle sessionHandle, - String catalogName, String schemaName) - throws HiveSQLException { - OperationHandle opHandle = sessionManager.getSession(sessionHandle) - .getSchemas(catalogName, schemaName); - LOG.info(sessionHandle + ": getSchemas()"); - return opHandle; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List) - */ - @Override - public 
OperationHandle getTables(SessionHandle sessionHandle, - String catalogName, String schemaName, String tableName, List tableTypes) - throws HiveSQLException { - OperationHandle opHandle = sessionManager - .getSession(sessionHandle).getTables(catalogName, schemaName, tableName, tableTypes); - LOG.info(sessionHandle + ": getTables()"); - return opHandle; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getTableTypes(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getTableTypes(SessionHandle sessionHandle) - throws HiveSQLException { - OperationHandle opHandle = sessionManager.getSession(sessionHandle).getTableTypes(); - LOG.info(sessionHandle + ": getTableTypes()"); - return opHandle; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getColumns(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getColumns(SessionHandle sessionHandle, - String catalogName, String schemaName, String tableName, String columnName) - throws HiveSQLException { - OperationHandle opHandle = sessionManager.getSession(sessionHandle) - .getColumns(catalogName, schemaName, tableName, columnName); - LOG.info(sessionHandle + ": getColumns()"); - return opHandle; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getFunctions(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getFunctions(SessionHandle sessionHandle, - String catalogName, String schemaName, String functionName) - throws HiveSQLException { - OperationHandle opHandle = sessionManager.getSession(sessionHandle) - .getFunctions(catalogName, schemaName, functionName); - LOG.info(sessionHandle + ": getFunctions()"); - return opHandle; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getOperationStatus(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public OperationState getOperationStatus(OperationHandle opHandle) - throws 
HiveSQLException { - OperationState opState = sessionManager.getOperationManager().getOperationState(opHandle); - LOG.info(opHandle + ": getOperationStatus()"); - return opState; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#cancelOperation(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public void cancelOperation(OperationHandle opHandle) - throws HiveSQLException { - sessionManager.getOperationManager().getOperation(opHandle). - getParentSession().cancelOperation(opHandle); - LOG.info(opHandle + ": cancelOperation()"); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#closeOperation(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public void closeOperation(OperationHandle opHandle) - throws HiveSQLException { - sessionManager.getOperationManager().getOperation(opHandle). - getParentSession().closeOperation(opHandle); - LOG.info(opHandle + ": closeOperation"); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public TableSchema getResultSetMetadata(OperationHandle opHandle) - throws HiveSQLException { - TableSchema tableSchema = sessionManager.getOperationManager().getOperation(opHandle). - getParentSession().getResultSetMetadata(opHandle); - LOG.info(opHandle + ": getResultSetMetadata()"); - return tableSchema; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long) - */ - @Override - public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows) - throws HiveSQLException { - RowSet rowSet = sessionManager.getOperationManager().getOperation(opHandle). 
- getParentSession().fetchResults(opHandle, orientation, maxRows); - LOG.info(opHandle + ": fetchResults()"); - return rowSet; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public RowSet fetchResults(OperationHandle opHandle) - throws HiveSQLException { - RowSet rowSet = sessionManager.getOperationManager().getOperation(opHandle). - getParentSession().fetchResults(opHandle); - LOG.info(opHandle + ": fetchResults()"); - return rowSet; - } - - // obtain delegation token for the give user from metastore - public synchronized String getDelegationTokenFromMetaStore(String owner) - throws HiveSQLException, UnsupportedOperationException, LoginException, IOException { - if (!hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL) || - !hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) { - throw new UnsupportedOperationException( - "delegation token is can only be obtained for a secure remote metastore"); - } - - try { - Hive.closeCurrent(); - return Hive.get(hiveConf).getDelegationToken(owner, owner); - } catch (HiveException e) { - if (e.getCause() instanceof UnsupportedOperationException) { - throw (UnsupportedOperationException)e.getCause(); - } else { - throw new HiveSQLException("Error connect metastore to setup impersonation", e); - } - } - } - - // create the give Path if doesn't exists and make it writable - private void setupStagingDir(String dirPath, boolean isLocal) throws IOException { - Path scratchDir = new Path(dirPath); - FileSystem fs; - if (isLocal) { - fs = FileSystem.getLocal(hiveConf); - } else { - fs = scratchDir.getFileSystem(hiveConf); - } - if (!fs.exists(scratchDir)) { - fs.mkdirs(scratchDir); - FsPermission fsPermission = new FsPermission((short)0777); - fs.setPermission(scratchDir, fsPermission); - } - } + private final Log LOG = LogFactory.getLog(CLIService.class.getName()); + + private HiveConf hiveConf; + private 
SessionManager sessionManager; + private IMetaStoreClient metastoreClient; + private String serverUserName = null; + + + public CLIService() { + super("CLIService"); + } + + @Override + public synchronized void init(HiveConf hiveConf) { + this.hiveConf = hiveConf; + + sessionManager = new SessionManager(); + addService(sessionManager); + try { + HiveAuthFactory.loginFromKeytab(hiveConf); + serverUserName = ShimLoader.getHadoopShims(). + getShortUserName(ShimLoader.getHadoopShims().getUGIForConf(hiveConf)); + } catch (IOException e) { + throw new ServiceException("Unable to login to kerberos with given principal/keytab", e); + } catch (LoginException e) { + throw new ServiceException("Unable to login to kerberos with given principal/keytab", e); + } + super.init(hiveConf); + } + + @Override + public synchronized void start() { + super.start(); + + try { + // make sure that the base scratch directories exists and writable + setupStagingDir(hiveConf.getVar(HiveConf.ConfVars.SCRATCHDIR), false); + setupStagingDir(hiveConf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR), true); + setupStagingDir(hiveConf.getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR), true); + } catch (IOException eIO) { + throw new ServiceException("Error setting stage directories", eIO); + } + + try { + // Initialize and test a connection to the metastore + metastoreClient = new HiveMetaStoreClient(hiveConf); + metastoreClient.getDatabases("default"); + } catch (Exception e) { + throw new ServiceException("Unable to connect to MetaStore!", e); + } + } + + @Override + public synchronized void stop() { + if (metastoreClient != null) { + metastoreClient.close(); + } + super.stop(); + } + + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map) + */ + @Override + public SessionHandle openSession(String username, String password, Map configuration) + throws HiveSQLException { + SessionHandle sessionHandle = 
sessionManager.openSession(username, password, configuration, false, null); + LOG.info(sessionHandle + ": openSession()"); + return sessionHandle; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map) + */ + @Override + public SessionHandle openSessionWithImpersonation(String username, String password, Map configuration, + String delegationToken) throws HiveSQLException { + SessionHandle sessionHandle = sessionManager.openSession(username, password, configuration, + true, delegationToken); + LOG.info(sessionHandle + ": openSession()"); + return sessionHandle; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public void closeSession(SessionHandle sessionHandle) + throws HiveSQLException { + sessionManager.closeSession(sessionHandle); + LOG.info(sessionHandle + ": closeSession()"); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getInfo(org.apache.hive.service.cli.SessionHandle, java.util.List) + */ + @Override + public GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType getInfoType) + throws HiveSQLException { + GetInfoValue infoValue = sessionManager.getSession(sessionHandle).getInfo(getInfoType); + LOG.info(sessionHandle + ": getInfo()"); + return infoValue; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#executeStatement(org.apache.hive.service.cli.SessionHandle, + * java.lang.String, java.util.Map) + */ + @Override + public OperationHandle executeStatement(SessionHandle sessionHandle, String statement, + Map confOverlay) + throws HiveSQLException { + OperationHandle opHandle = sessionManager.getSession(sessionHandle) + .executeStatement(statement, confOverlay); + LOG.info(sessionHandle + ": executeStatement()"); + return opHandle; + } + + /* (non-Javadoc) + * @see 
org.apache.hive.service.cli.ICLIService#executeStatementAsync(org.apache.hive.service.cli.SessionHandle, + * java.lang.String, java.util.Map) + */ + @Override + public OperationHandle executeStatementAsync(SessionHandle sessionHandle, String statement, + Map confOverlay) throws HiveSQLException { + OperationHandle opHandle = sessionManager.getSession(sessionHandle) + .executeStatementAsync(statement, confOverlay); + LOG.info(sessionHandle + ": executeStatementAsync()"); + return opHandle; + } + + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getTypeInfo(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getTypeInfo(SessionHandle sessionHandle) + throws HiveSQLException { + OperationHandle opHandle = sessionManager.getSession(sessionHandle).getTypeInfo(); + LOG.info(sessionHandle + ": getTypeInfo()"); + return opHandle; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getCatalogs(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getCatalogs(SessionHandle sessionHandle) + throws HiveSQLException { + OperationHandle opHandle = sessionManager.getSession(sessionHandle).getCatalogs(); + LOG.info(sessionHandle + ": getCatalogs()"); + return opHandle; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String) + */ + @Override + public OperationHandle getSchemas(SessionHandle sessionHandle, + String catalogName, String schemaName) + throws HiveSQLException { + OperationHandle opHandle = sessionManager.getSession(sessionHandle) + .getSchemas(catalogName, schemaName); + LOG.info(sessionHandle + ": getSchemas()"); + return opHandle; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List) + */ + @Override + public 
OperationHandle getTables(SessionHandle sessionHandle, + String catalogName, String schemaName, String tableName, List tableTypes) + throws HiveSQLException { + OperationHandle opHandle = sessionManager + .getSession(sessionHandle).getTables(catalogName, schemaName, tableName, tableTypes); + LOG.info(sessionHandle + ": getTables()"); + return opHandle; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getTableTypes(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getTableTypes(SessionHandle sessionHandle) + throws HiveSQLException { + OperationHandle opHandle = sessionManager.getSession(sessionHandle).getTableTypes(); + LOG.info(sessionHandle + ": getTableTypes()"); + return opHandle; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getColumns(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getColumns(SessionHandle sessionHandle, + String catalogName, String schemaName, String tableName, String columnName) + throws HiveSQLException { + OperationHandle opHandle = sessionManager.getSession(sessionHandle) + .getColumns(catalogName, schemaName, tableName, columnName); + LOG.info(sessionHandle + ": getColumns()"); + return opHandle; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getFunctions(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getFunctions(SessionHandle sessionHandle, + String catalogName, String schemaName, String functionName) + throws HiveSQLException { + OperationHandle opHandle = sessionManager.getSession(sessionHandle) + .getFunctions(catalogName, schemaName, functionName); + LOG.info(sessionHandle + ": getFunctions()"); + return opHandle; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getOperationStatus(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public OperationStatus getOperationStatus(OperationHandle opHandle) + throws 
HiveSQLException { + OperationStatus opStatus = sessionManager.getOperationManager().getOperationStatus(opHandle); + LOG.info(opHandle + ": getOperationStatus()"); + return opStatus; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#cancelOperation(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public void cancelOperation(OperationHandle opHandle) + throws HiveSQLException { + sessionManager.getOperationManager().getOperation(opHandle). + getParentSession().cancelOperation(opHandle); + LOG.info(opHandle + ": cancelOperation()"); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#closeOperation(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public void closeOperation(OperationHandle opHandle) + throws HiveSQLException { + sessionManager.getOperationManager().getOperation(opHandle). + getParentSession().closeOperation(opHandle); + LOG.info(opHandle + ": closeOperation"); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public TableSchema getResultSetMetadata(OperationHandle opHandle) + throws HiveSQLException { + TableSchema tableSchema = sessionManager.getOperationManager().getOperation(opHandle). + getParentSession().getResultSetMetadata(opHandle); + LOG.info(opHandle + ": getResultSetMetadata()"); + return tableSchema; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long) + */ + @Override + public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows) + throws HiveSQLException { + RowSet rowSet = sessionManager.getOperationManager().getOperation(opHandle). 
+ getParentSession().fetchResults(opHandle, orientation, maxRows); + LOG.info(opHandle + ": fetchResults()"); + return rowSet; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public RowSet fetchResults(OperationHandle opHandle) + throws HiveSQLException { + RowSet rowSet = sessionManager.getOperationManager().getOperation(opHandle). + getParentSession().fetchResults(opHandle); + LOG.info(opHandle + ": fetchResults()"); + return rowSet; + } + + // obtain delegation token for the give user from metastore + public synchronized String getDelegationTokenFromMetaStore(String owner) + throws HiveSQLException, UnsupportedOperationException, LoginException, IOException { + if (!hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL) || + !hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) { + throw new UnsupportedOperationException( + "delegation token is can only be obtained for a secure remote metastore"); + } + + try { + Hive.closeCurrent(); + return Hive.get(hiveConf).getDelegationToken(owner, owner); + } catch (HiveException e) { + if (e.getCause() instanceof UnsupportedOperationException) { + throw (UnsupportedOperationException)e.getCause(); + } else { + throw new HiveSQLException("Error connect metastore to setup impersonation", e); + } + } + } + + // create the give Path if doesn't exists and make it writable + private void setupStagingDir(String dirPath, boolean isLocal) throws IOException { + Path scratchDir = new Path(dirPath); + FileSystem fs; + if (isLocal) { + fs = FileSystem.getLocal(hiveConf); + } else { + fs = scratchDir.getFileSystem(hiveConf); + } + if (!fs.exists(scratchDir)) { + fs.mkdirs(scratchDir); + FsPermission fsPermission = new FsPermission((short)0777); + fs.setPermission(scratchDir, fsPermission); + } + } } diff --git a/service/src/java/org/apache/hive/service/cli/CLIServiceClient.java 
b/service/src/java/org/apache/hive/service/cli/CLIServiceClient.java index 14ef54f..4fc47f7 100644 --- a/service/src/java/org/apache/hive/service/cli/CLIServiceClient.java +++ b/service/src/java/org/apache/hive/service/cli/CLIServiceClient.java @@ -19,8 +19,6 @@ package org.apache.hive.service.cli; import java.util.Collections; -import java.util.List; -import java.util.Map; /** @@ -29,133 +27,18 @@ */ public abstract class CLIServiceClient implements ICLIService { - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map) - */ - @Override - public abstract SessionHandle openSession(String username, String password, - Map configuration) throws HiveSQLException; - - - public SessionHandle openSession(String username, String password) - throws HiveSQLException { - return openSession(username, password, Collections.emptyMap()); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public abstract void closeSession(SessionHandle sessionHandle) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getInfo(org.apache.hive.service.cli.SessionHandle, java.util.List) - */ - @Override - public abstract GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType getInfoType) - throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#executeStatement(org.apache.hive.service.cli.SessionHandle, - * java.lang.String, java.util.Map) - */ - @Override - public abstract OperationHandle executeStatement(SessionHandle sessionHandle, String statement, - Map confOverlay) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#executeStatementAsync(org.apache.hive.service.cli.SessionHandle, - * java.lang.String, java.util.Map) - */ - @Override - public abstract OperationHandle 
executeStatementAsync(SessionHandle sessionHandle, String statement, - Map confOverlay) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getTypeInfo(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public abstract OperationHandle getTypeInfo(SessionHandle sessionHandle) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getCatalogs(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public abstract OperationHandle getCatalogs(SessionHandle sessionHandle) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String) - */ - @Override - public abstract OperationHandle getSchemas(SessionHandle sessionHandle, String catalogName, - String schemaName) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List) - */ - @Override - public abstract OperationHandle getTables(SessionHandle sessionHandle, String catalogName, - String schemaName, String tableName, List tableTypes) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getTableTypes(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public abstract OperationHandle getTableTypes(SessionHandle sessionHandle) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getColumns(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.lang.String) - */ - @Override - public abstract OperationHandle getColumns(SessionHandle sessionHandle, String catalogName, - String schemaName, String tableName, String columnName) throws HiveSQLException; - - /* (non-Javadoc) - * @see 
org.apache.hive.service.cli.ICLIService#getFunctions(org.apache.hive.service.cli.SessionHandle, java.lang.String) - */ - @Override - public abstract OperationHandle getFunctions(SessionHandle sessionHandle, - String catalogName, String schemaName, String functionName) - throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getOperationStatus(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public abstract OperationState getOperationStatus(OperationHandle opHandle) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#cancelOperation(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public abstract void cancelOperation(OperationHandle opHandle) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#closeOperation(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public abstract void closeOperation(OperationHandle opHandle) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public abstract TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long) - */ - @Override - public abstract RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows) - throws HiveSQLException; - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException { - // TODO: provide STATIC default value - return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 1000); - } + public SessionHandle openSession(String 
username, String password) + throws HiveSQLException { + return openSession(username, password, Collections.emptyMap()); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException { + // TODO: provide STATIC default value + return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 1000); + } } diff --git a/service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java b/service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java index 9dca874..f13a416 100644 --- a/service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java +++ b/service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java @@ -27,165 +27,165 @@ * */ public class EmbeddedCLIServiceClient extends CLIServiceClient { - private final ICLIService cliService; - - public EmbeddedCLIServiceClient(ICLIService cliService) { - this.cliService = cliService; - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#openSession(java.lang.String, java.lang.String, java.util.Map) - */ - @Override - public SessionHandle openSession(String username, String password, - Map configuration) throws HiveSQLException { - return cliService.openSession(username, password, configuration); - } - - @Override - public SessionHandle openSessionWithImpersonation(String username, String password, - Map configuration, String delegationToken) throws HiveSQLException { - throw new HiveSQLException("Impersonated session is not supported in the embedded mode"); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#closeSession(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public void closeSession(SessionHandle sessionHandle) throws HiveSQLException { - cliService.closeSession(sessionHandle); - } - - /* (non-Javadoc) - * @see 
org.apache.hive.service.cli.CLIServiceClient#getInfo(org.apache.hive.service.cli.SessionHandle, java.util.List) - */ - @Override - public GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType getInfoType) - throws HiveSQLException { - return cliService.getInfo(sessionHandle, getInfoType); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#executeStatement(org.apache.hive.service.cli.SessionHandle, - * java.lang.String, java.util.Map) - */ - @Override - public OperationHandle executeStatement(SessionHandle sessionHandle, String statement, - Map confOverlay) throws HiveSQLException { - return cliService.executeStatement(sessionHandle, statement, confOverlay); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#executeStatementAsync(org.apache.hive.service.cli.SessionHandle, - * java.lang.String, java.util.Map) - */ - @Override - public OperationHandle executeStatementAsync(SessionHandle sessionHandle, String statement, - Map confOverlay) throws HiveSQLException { - return cliService.executeStatementAsync(sessionHandle, statement, confOverlay); - } - - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#getTypeInfo(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getTypeInfo(SessionHandle sessionHandle) throws HiveSQLException { - return cliService.getTypeInfo(sessionHandle); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#getCatalogs(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getCatalogs(SessionHandle sessionHandle) throws HiveSQLException { - return cliService.getCatalogs(sessionHandle); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String) - */ - @Override - public OperationHandle getSchemas(SessionHandle sessionHandle, String catalogName, - String schemaName) 
throws HiveSQLException { - return cliService.getSchemas(sessionHandle, catalogName, schemaName); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List) - */ - @Override - public OperationHandle getTables(SessionHandle sessionHandle, String catalogName, - String schemaName, String tableName, List tableTypes) throws HiveSQLException { - return cliService.getTables(sessionHandle, catalogName, schemaName, tableName, tableTypes); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#getTableTypes(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getTableTypes(SessionHandle sessionHandle) throws HiveSQLException { - return cliService.getTableTypes(sessionHandle); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#getColumns(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.lang.String) - */ - @Override - public OperationHandle getColumns(SessionHandle sessionHandle, String catalogName, - String schemaName, String tableName, String columnName) throws HiveSQLException { - return cliService.getColumns(sessionHandle, catalogName, schemaName, tableName, columnName); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#getFunctions(org.apache.hive.service.cli.SessionHandle, java.lang.String) - */ - @Override - public OperationHandle getFunctions(SessionHandle sessionHandle, - String catalogName, String schemaName, String functionName) - throws HiveSQLException { - return cliService.getFunctions(sessionHandle, catalogName, schemaName, functionName); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#getOperationStatus(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public OperationState getOperationStatus(OperationHandle 
opHandle) throws HiveSQLException { - return cliService.getOperationStatus(opHandle); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#cancelOperation(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public void cancelOperation(OperationHandle opHandle) throws HiveSQLException { - cliService.cancelOperation(opHandle); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#closeOperation(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public void closeOperation(OperationHandle opHandle) throws HiveSQLException { - cliService.closeOperation(opHandle); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException { - return cliService.getResultSetMetadata(opHandle); - } - - /* (non-Javadoc) - * @see org.apache.hive.service.cli.CLIServiceClient#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long) - */ - @Override - public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows) - throws HiveSQLException { - return cliService.fetchResults(opHandle, orientation, maxRows); - } + private final ICLIService cliService; + + public EmbeddedCLIServiceClient(ICLIService cliService) { + this.cliService = cliService; + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#openSession(java.lang.String, java.lang.String, java.util.Map) + */ + @Override + public SessionHandle openSession(String username, String password, + Map configuration) throws HiveSQLException { + return cliService.openSession(username, password, configuration); + } + + @Override + public SessionHandle openSessionWithImpersonation(String username, String password, + Map configuration, String delegationToken) throws 
HiveSQLException { + throw new HiveSQLException("Impersonated session is not supported in the embedded mode"); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#closeSession(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public void closeSession(SessionHandle sessionHandle) throws HiveSQLException { + cliService.closeSession(sessionHandle); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#getInfo(org.apache.hive.service.cli.SessionHandle, java.util.List) + */ + @Override + public GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType getInfoType) + throws HiveSQLException { + return cliService.getInfo(sessionHandle, getInfoType); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#executeStatement(org.apache.hive.service.cli.SessionHandle, + * java.lang.String, java.util.Map) + */ + @Override + public OperationHandle executeStatement(SessionHandle sessionHandle, String statement, + Map confOverlay) throws HiveSQLException { + return cliService.executeStatement(sessionHandle, statement, confOverlay); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#executeStatementAsync(org.apache.hive.service.cli.SessionHandle, + * java.lang.String, java.util.Map) + */ + @Override + public OperationHandle executeStatementAsync(SessionHandle sessionHandle, String statement, + Map confOverlay) throws HiveSQLException { + return cliService.executeStatementAsync(sessionHandle, statement, confOverlay); + } + + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#getTypeInfo(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getTypeInfo(SessionHandle sessionHandle) throws HiveSQLException { + return cliService.getTypeInfo(sessionHandle); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#getCatalogs(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public 
OperationHandle getCatalogs(SessionHandle sessionHandle) throws HiveSQLException { + return cliService.getCatalogs(sessionHandle); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String) + */ + @Override + public OperationHandle getSchemas(SessionHandle sessionHandle, String catalogName, + String schemaName) throws HiveSQLException { + return cliService.getSchemas(sessionHandle, catalogName, schemaName); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List) + */ + @Override + public OperationHandle getTables(SessionHandle sessionHandle, String catalogName, + String schemaName, String tableName, List tableTypes) throws HiveSQLException { + return cliService.getTables(sessionHandle, catalogName, schemaName, tableName, tableTypes); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#getTableTypes(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getTableTypes(SessionHandle sessionHandle) throws HiveSQLException { + return cliService.getTableTypes(sessionHandle); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#getColumns(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public OperationHandle getColumns(SessionHandle sessionHandle, String catalogName, + String schemaName, String tableName, String columnName) throws HiveSQLException { + return cliService.getColumns(sessionHandle, catalogName, schemaName, tableName, columnName); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#getFunctions(org.apache.hive.service.cli.SessionHandle, java.lang.String) + */ + @Override + public OperationHandle 
getFunctions(SessionHandle sessionHandle, + String catalogName, String schemaName, String functionName) + throws HiveSQLException { + return cliService.getFunctions(sessionHandle, catalogName, schemaName, functionName); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#getOperationStatus(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public OperationStatus getOperationStatus(OperationHandle opHandle) throws HiveSQLException { + return cliService.getOperationStatus(opHandle); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#cancelOperation(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public void cancelOperation(OperationHandle opHandle) throws HiveSQLException { + cliService.cancelOperation(opHandle); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#closeOperation(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public void closeOperation(OperationHandle opHandle) throws HiveSQLException { + cliService.closeOperation(opHandle); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException { + return cliService.getResultSetMetadata(opHandle); + } + + /* (non-Javadoc) + * @see org.apache.hive.service.cli.CLIServiceClient#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long) + */ + @Override + public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows) + throws HiveSQLException { + return cliService.fetchResults(opHandle, orientation, maxRows); + } } diff --git a/service/src/java/org/apache/hive/service/cli/HiveSQLException.java b/service/src/java/org/apache/hive/service/cli/HiveSQLException.java index 74e8b94..bfe35ae 100644 --- 
a/service/src/java/org/apache/hive/service/cli/HiveSQLException.java +++ b/service/src/java/org/apache/hive/service/cli/HiveSQLException.java @@ -29,96 +29,96 @@ */ public class HiveSQLException extends SQLException { - /** - * - */ - private static final long serialVersionUID = -6095254671958748094L; - - /** - * - */ - public HiveSQLException() { - super(); - } - - /** - * @param reason - */ - public HiveSQLException(String reason) { - super(reason); - } - - /** - * @param cause - */ - public HiveSQLException(Throwable cause) { - super(cause); - } - - /** - * @param reason - * @param sqlState - */ - public HiveSQLException(String reason, String sqlState) { - super(reason, sqlState); - } - - /** - * @param reason - * @param cause - */ - public HiveSQLException(String reason, Throwable cause) { - super(reason, cause); - } - - /** - * @param reason - * @param sqlState - * @param vendorCode - */ - public HiveSQLException(String reason, String sqlState, int vendorCode) { - super(reason, sqlState, vendorCode); - } - - /** - * @param reason - * @param sqlState - * @param cause - */ - public HiveSQLException(String reason, String sqlState, Throwable cause) { - super(reason, sqlState, cause); - } - - /** - * @param reason - * @param sqlState - * @param vendorCode - * @param cause - */ - public HiveSQLException(String reason, String sqlState, int vendorCode, Throwable cause) { - super(reason, sqlState, vendorCode, cause); - } - - public HiveSQLException(TStatus status) { - // TODO: set correct vendorCode field - super(status.getErrorMessage(), status.getSqlState(), 1); - } - - public TStatus toTStatus() { - // TODO: convert sqlState, etc. 
- TStatus tStatus = new TStatus(TStatusCode.ERROR_STATUS); - tStatus.setSqlState(getSQLState()); - tStatus.setErrorCode(getErrorCode()); - tStatus.setErrorMessage(getMessage()); - return tStatus; - } - - public static TStatus toTStatus(Exception e) { - if (e instanceof HiveSQLException) { - return ((HiveSQLException)e).toTStatus(); - } - TStatus tStatus = new TStatus(TStatusCode.ERROR_STATUS); - return tStatus; - } + /** + * + */ + private static final long serialVersionUID = -6095254671958748094L; + + /** + * + */ + public HiveSQLException() { + super(); + } + + /** + * @param reason + */ + public HiveSQLException(String reason) { + super(reason); + } + + /** + * @param cause + */ + public HiveSQLException(Throwable cause) { + super(cause); + } + + /** + * @param reason + * @param sqlState + */ + public HiveSQLException(String reason, String sqlState) { + super(reason, sqlState); + } + + /** + * @param reason + * @param cause + */ + public HiveSQLException(String reason, Throwable cause) { + super(reason, cause); + } + + /** + * @param reason + * @param sqlState + * @param vendorCode + */ + public HiveSQLException(String reason, String sqlState, int vendorCode) { + super(reason, sqlState, vendorCode); + } + + /** + * @param reason + * @param sqlState + * @param cause + */ + public HiveSQLException(String reason, String sqlState, Throwable cause) { + super(reason, sqlState, cause); + } + + /** + * @param reason + * @param sqlState + * @param vendorCode + * @param cause + */ + public HiveSQLException(String reason, String sqlState, int vendorCode, Throwable cause) { + super(reason, sqlState, vendorCode, cause); + } + + public HiveSQLException(TStatus status) { + // TODO: set correct vendorCode field + super(status.getErrorMessage(), status.getSqlState(), 1); + } + + public TStatus toTStatus() { + // TODO: convert sqlState, etc. 
+ TStatus tStatus = new TStatus(TStatusCode.ERROR_STATUS); + tStatus.setSqlState(getSQLState()); + tStatus.setErrorCode(getErrorCode()); + tStatus.setErrorMessage(getMessage()); + return tStatus; + } + + public static TStatus toTStatus(Exception e) { + if (e instanceof HiveSQLException) { + return ((HiveSQLException)e).toTStatus(); + } + TStatus tStatus = new TStatus(TStatusCode.ERROR_STATUS); + return tStatus; + } } diff --git a/service/src/java/org/apache/hive/service/cli/ICLIService.java b/service/src/java/org/apache/hive/service/cli/ICLIService.java index f647ce6..2b1712d 100644 --- a/service/src/java/org/apache/hive/service/cli/ICLIService.java +++ b/service/src/java/org/apache/hive/service/cli/ICLIService.java @@ -72,7 +72,7 @@ public abstract OperationHandle getFunctions(SessionHandle sessionHandle, String catalogName, String schemaName, String functionName) throws HiveSQLException; - public abstract OperationState getOperationStatus(OperationHandle opHandle) + public abstract OperationStatus getOperationStatus(OperationHandle opHandle) throws HiveSQLException; public abstract void cancelOperation(OperationHandle opHandle) diff --git a/service/src/java/org/apache/hive/service/cli/OperationState.java b/service/src/java/org/apache/hive/service/cli/OperationState.java index 1ec6bd1..b8446e1 100644 --- a/service/src/java/org/apache/hive/service/cli/OperationState.java +++ b/service/src/java/org/apache/hive/service/cli/OperationState.java @@ -25,80 +25,80 @@ * */ public enum OperationState { - INITIALIZED(TOperationState.INITIALIZED_STATE), - RUNNING(TOperationState.RUNNING_STATE), - FINISHED(TOperationState.FINISHED_STATE), - CANCELED(TOperationState.CANCELED_STATE), - CLOSED(TOperationState.CLOSED_STATE), - ERROR(TOperationState.ERROR_STATE), - UNKNOWN(TOperationState.UKNOWN_STATE), - PENDING(TOperationState.PENDING_STATE); + INITIALIZED(TOperationState.INITIALIZED_STATE), + RUNNING(TOperationState.RUNNING_STATE), + FINISHED(TOperationState.FINISHED_STATE), + 
CANCELED(TOperationState.CANCELED_STATE), + CLOSED(TOperationState.CLOSED_STATE), + ERROR(TOperationState.ERROR_STATE), + UNKNOWN(TOperationState.UKNOWN_STATE), + PENDING(TOperationState.PENDING_STATE); - private final TOperationState tOperationState; + private final TOperationState tOperationState; - OperationState(TOperationState tOperationState) { - this.tOperationState = tOperationState; - } + OperationState(TOperationState tOperationState) { + this.tOperationState = tOperationState; + } - public static OperationState getOperationState(TOperationState tOperationState) { - // TODO: replace this with a Map? - for (OperationState opState : values()) { - if (tOperationState.equals(opState.tOperationState)) { - return opState; - } - } - return OperationState.UNKNOWN; - } + public static OperationState getOperationState(TOperationState tOperationState) { + // TODO: replace this with a Map? + for (OperationState opState : values()) { + if (tOperationState.equals(opState.tOperationState)) { + return opState; + } + } + return OperationState.UNKNOWN; + } - public static void validateTransition(OperationState oldState, OperationState newState) - throws HiveSQLException { - switch (oldState) { - case INITIALIZED: - switch (newState) { - case PENDING: - case RUNNING: - case CLOSED: - return; - } - break; - case PENDING: - switch (newState) { - case RUNNING: - case FINISHED: - case CANCELED: - case ERROR: - case CLOSED: - return; - } - break; - case RUNNING: - switch (newState) { - case FINISHED: - case CANCELED: - case ERROR: - case CLOSED: - return; - } - break; - case FINISHED: - case CANCELED: - case ERROR: - if (OperationState.CLOSED.equals(newState)) { - return; - } - default: - // fall-through - } - throw new HiveSQLException("Illegal Operation state transition"); - } + public static void validateTransition(OperationState oldState, OperationState newState) + throws HiveSQLException { + switch (oldState) { + case INITIALIZED: + switch (newState) { + case PENDING: + 
case RUNNING: + case CLOSED: + return; + } + break; + case PENDING: + switch (newState) { + case RUNNING: + case FINISHED: + case CANCELED: + case ERROR: + case CLOSED: + return; + } + break; + case RUNNING: + switch (newState) { + case FINISHED: + case CANCELED: + case ERROR: + case CLOSED: + return; + } + break; + case FINISHED: + case CANCELED: + case ERROR: + if (OperationState.CLOSED.equals(newState)) { + return; + } + default: + // fall-through + } + throw new HiveSQLException("Illegal Operation state transition"); + } - public void validateTransition(OperationState newState) - throws HiveSQLException { - validateTransition(this, newState); - } + public void validateTransition(OperationState newState) + throws HiveSQLException { + validateTransition(this, newState); + } - public TOperationState toTOperationState() { - return tOperationState; - } + public TOperationState toTOperationState() { + return tOperationState; + } } diff --git a/service/src/java/org/apache/hive/service/cli/OperationStatus.java b/service/src/java/org/apache/hive/service/cli/OperationStatus.java new file mode 100644 index 0000000..3289008 --- /dev/null +++ b/service/src/java/org/apache/hive/service/cli/OperationStatus.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hive.service.cli; + +/** + * OperationStatus. + * + */ +public class OperationStatus { + + private final OperationState state; + private final HiveSQLException operationException; + + public OperationStatus(OperationState state, HiveSQLException operationException) { + this.state = state; + this.operationException = operationException; + } + + public OperationState getState() { + return state; + } + + public HiveSQLException getOperationException() { + return operationException; + } + +} \ No newline at end of file diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java index 6f4b8dc..5bbf906 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java @@ -24,6 +24,7 @@ import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.OperationHandle; import org.apache.hive.service.cli.OperationState; +import org.apache.hive.service.cli.OperationStatus; import org.apache.hive.service.cli.OperationType; import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.TableSchema; @@ -32,96 +33,101 @@ public abstract class Operation { - protected final HiveSession parentSession; - private OperationState state = OperationState.INITIALIZED; - private final OperationHandle opHandle; - private HiveConf configuration; - public static final Log LOG = LogFactory.getLog(Operation.class.getName()); - public static final long DEFAULT_FETCH_MAX_ROWS = 100; - protected boolean hasResultSet; - - protected Operation(HiveSession parentSession, OperationType opType) { - super(); - this.parentSession = parentSession; - opHandle = new OperationHandle(opType); - } - - public void setConfiguration(HiveConf configuration) { - 
this.configuration = new HiveConf(configuration); - } - - public HiveConf getConfiguration() { - return new HiveConf(configuration); - } - - public HiveSession getParentSession() { - return parentSession; - } - - public OperationHandle getHandle() { - return opHandle; - } - - public OperationType getType() { - return opHandle.getOperationType(); - } - - public OperationState getState() { - return state; - } - - public boolean hasResultSet() { - return hasResultSet; - } - - protected void setHasResultSet(boolean hasResultSet) { - this.hasResultSet = hasResultSet; - opHandle.setHasResultSet(hasResultSet); - } - - protected final OperationState setState(OperationState newState) throws HiveSQLException { - state.validateTransition(newState); - this.state = newState; - return this.state; - } - - protected final void assertState(OperationState state) throws HiveSQLException { - if (this.state != state) { - throw new HiveSQLException("Expected state " + state + ", but found " + this.state); - } - } - - public boolean isRunning() { - return OperationState.RUNNING.equals(getState()); - } - - public boolean isFinished() { - return OperationState.FINISHED.equals(getState()); - } - - public boolean isCanceled() { - return OperationState.CANCELED.equals(getState()); - } - - public boolean isFailed() { - return OperationState.ERROR.equals(getState()); - } - - public abstract void run() throws HiveSQLException; - - // TODO: make this abstract and implement in subclasses. 
- public void cancel() throws HiveSQLException { - setState(OperationState.CANCELED); - throw new UnsupportedOperationException("SQLOperation.cancel()"); - } - - public abstract void close() throws HiveSQLException; - - public abstract TableSchema getResultSetSchema() throws HiveSQLException; - - public abstract RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException; - - public RowSet getNextRowSet() throws HiveSQLException { - return getNextRowSet(FetchOrientation.FETCH_NEXT, DEFAULT_FETCH_MAX_ROWS); - } + protected final HiveSession parentSession; + private OperationState state = OperationState.INITIALIZED; + private final OperationHandle opHandle; + private HiveConf configuration; + public static final Log LOG = LogFactory.getLog(Operation.class.getName()); + public static final long DEFAULT_FETCH_MAX_ROWS = 100; + protected boolean hasResultSet; + protected volatile HiveSQLException operationException; + + protected Operation(HiveSession parentSession, OperationType opType) { + super(); + this.parentSession = parentSession; + opHandle = new OperationHandle(opType); + } + + public void setConfiguration(HiveConf configuration) { + this.configuration = new HiveConf(configuration); + } + + public HiveConf getConfiguration() { + return new HiveConf(configuration); + } + + public HiveSession getParentSession() { + return parentSession; + } + + public OperationHandle getHandle() { + return opHandle; + } + + public OperationType getType() { + return opHandle.getOperationType(); + } + + public OperationStatus getStatus() { + return new OperationStatus(state, operationException); + } + + public boolean hasResultSet() { + return hasResultSet; + } + + protected void setHasResultSet(boolean hasResultSet) { + this.hasResultSet = hasResultSet; + opHandle.setHasResultSet(hasResultSet); + } + + protected final OperationState setState(OperationState newState) throws HiveSQLException { + state.validateTransition(newState); + this.state = 
newState; + return this.state; + } + + protected void setOperationException(HiveSQLException operationException) { + this.operationException = operationException; + } + + protected final void assertState(OperationState state) throws HiveSQLException { + if (this.state != state) { + throw new HiveSQLException("Expected state " + state + ", but found " + this.state); + } + } + + public boolean isRunning() { + return OperationState.RUNNING.equals(state); + } + + public boolean isFinished() { + return OperationState.FINISHED.equals(state); + } + + public boolean isCanceled() { + return OperationState.CANCELED.equals(state); + } + + public boolean isFailed() { + return OperationState.ERROR.equals(state); + } + + public abstract void run() throws HiveSQLException; + + // TODO: make this abstract and implement in subclasses. + public void cancel() throws HiveSQLException { + setState(OperationState.CANCELED); + throw new UnsupportedOperationException("SQLOperation.cancel()"); + } + + public abstract void close() throws HiveSQLException; + + public abstract TableSchema getResultSetSchema() throws HiveSQLException; + + public abstract RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException; + + public RowSet getNextRowSet() throws HiveSQLException { + return getNextRowSet(FetchOrientation.FETCH_NEXT, DEFAULT_FETCH_MAX_ROWS); + } } diff --git a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java index bcdb67f..eb7be51 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java +++ b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java @@ -27,7 +27,7 @@ import org.apache.hive.service.cli.FetchOrientation; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.OperationHandle; -import org.apache.hive.service.cli.OperationState; +import 
org.apache.hive.service.cli.OperationStatus; import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.TableSchema; import org.apache.hive.service.cli.session.HiveSession; @@ -38,136 +38,136 @@ */ public class OperationManager extends AbstractService { - private HiveConf hiveConf; - private final Map handleToOperation = - new HashMap(); - - public OperationManager() { - super("OperationManager"); - } - - @Override - public synchronized void init(HiveConf hiveConf) { - this.hiveConf = hiveConf; - - super.init(hiveConf); - } - - @Override - public synchronized void start() { - super.start(); - // TODO - } - - @Override - public synchronized void stop() { - // TODO - super.stop(); - } - - public ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession, - String statement, Map confOverlay, boolean runAsync) - throws HiveSQLException { - ExecuteStatementOperation executeStatementOperation = ExecuteStatementOperation - .newExecuteStatementOperation(parentSession, statement, confOverlay, runAsync); - addOperation(executeStatementOperation); - return executeStatementOperation; - } - - public GetTypeInfoOperation newGetTypeInfoOperation(HiveSession parentSession) { - GetTypeInfoOperation operation = new GetTypeInfoOperation(parentSession); - addOperation(operation); - return operation; - } - - public GetCatalogsOperation newGetCatalogsOperation(HiveSession parentSession) { - GetCatalogsOperation operation = new GetCatalogsOperation(parentSession); - addOperation(operation); - return operation; - } - - public GetSchemasOperation newGetSchemasOperation(HiveSession parentSession, - String catalogName, String schemaName) { - GetSchemasOperation operation = new GetSchemasOperation(parentSession, catalogName, schemaName); - addOperation(operation); - return operation; - } - - public MetadataOperation newGetTablesOperation(HiveSession parentSession, - String catalogName, String schemaName, String tableName, - List tableTypes) { - 
MetadataOperation operation = - new GetTablesOperation(parentSession, catalogName, schemaName, tableName, tableTypes); - addOperation(operation); - return operation; - } - - public GetTableTypesOperation newGetTableTypesOperation(HiveSession parentSession) { - GetTableTypesOperation operation = new GetTableTypesOperation(parentSession); - addOperation(operation); - return operation; - } - - public GetColumnsOperation newGetColumnsOperation(HiveSession parentSession, - String catalogName, String schemaName, String tableName, String columnName) { - GetColumnsOperation operation = new GetColumnsOperation(parentSession, - catalogName, schemaName, tableName, columnName); - addOperation(operation); - return operation; - } - - public GetFunctionsOperation newGetFunctionsOperation(HiveSession parentSession, - String catalogName, String schemaName, String functionName) { - GetFunctionsOperation operation = new GetFunctionsOperation(parentSession, - catalogName, schemaName, functionName); - addOperation(operation); - return operation; - } - - public synchronized Operation getOperation(OperationHandle operationHandle) throws HiveSQLException { - Operation operation = handleToOperation.get(operationHandle); - if (operation == null) { - throw new HiveSQLException("Invalid OperationHandle: " + operationHandle); - } - return operation; - } - - private synchronized void addOperation(Operation operation) { - handleToOperation.put(operation.getHandle(), operation); - } - - private synchronized Operation removeOperation(OperationHandle opHandle) { - return handleToOperation.remove(opHandle); - } - - public OperationState getOperationState(OperationHandle opHandle) throws HiveSQLException { - return getOperation(opHandle).getState(); - } - - public void cancelOperation(OperationHandle opHandle) throws HiveSQLException { - getOperation(opHandle).cancel(); - } - - public void closeOperation(OperationHandle opHandle) throws HiveSQLException { - Operation operation = 
removeOperation(opHandle); - if (operation == null) { - throw new HiveSQLException("Operation does not exist!"); - } - operation.close(); - } - - public TableSchema getOperationResultSetSchema(OperationHandle opHandle) - throws HiveSQLException { - return getOperation(opHandle).getResultSetSchema(); - } - - public RowSet getOperationNextRowSet(OperationHandle opHandle) throws HiveSQLException { - return getOperation(opHandle).getNextRowSet(); - } - - public RowSet getOperationNextRowSet(OperationHandle opHandle, - FetchOrientation orientation, long maxRows) - throws HiveSQLException { - return getOperation(opHandle).getNextRowSet(orientation, maxRows); - } + private HiveConf hiveConf; + private final Map handleToOperation = + new HashMap(); + + public OperationManager() { + super("OperationManager"); + } + + @Override + public synchronized void init(HiveConf hiveConf) { + this.hiveConf = hiveConf; + + super.init(hiveConf); + } + + @Override + public synchronized void start() { + super.start(); + // TODO + } + + @Override + public synchronized void stop() { + // TODO + super.stop(); + } + + public ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession, + String statement, Map confOverlay, boolean runAsync) + throws HiveSQLException { + ExecuteStatementOperation executeStatementOperation = ExecuteStatementOperation + .newExecuteStatementOperation(parentSession, statement, confOverlay, runAsync); + addOperation(executeStatementOperation); + return executeStatementOperation; + } + + public GetTypeInfoOperation newGetTypeInfoOperation(HiveSession parentSession) { + GetTypeInfoOperation operation = new GetTypeInfoOperation(parentSession); + addOperation(operation); + return operation; + } + + public GetCatalogsOperation newGetCatalogsOperation(HiveSession parentSession) { + GetCatalogsOperation operation = new GetCatalogsOperation(parentSession); + addOperation(operation); + return operation; + } + + public GetSchemasOperation 
newGetSchemasOperation(HiveSession parentSession, + String catalogName, String schemaName) { + GetSchemasOperation operation = new GetSchemasOperation(parentSession, catalogName, schemaName); + addOperation(operation); + return operation; + } + + public MetadataOperation newGetTablesOperation(HiveSession parentSession, + String catalogName, String schemaName, String tableName, + List tableTypes) { + MetadataOperation operation = + new GetTablesOperation(parentSession, catalogName, schemaName, tableName, tableTypes); + addOperation(operation); + return operation; + } + + public GetTableTypesOperation newGetTableTypesOperation(HiveSession parentSession) { + GetTableTypesOperation operation = new GetTableTypesOperation(parentSession); + addOperation(operation); + return operation; + } + + public GetColumnsOperation newGetColumnsOperation(HiveSession parentSession, + String catalogName, String schemaName, String tableName, String columnName) { + GetColumnsOperation operation = new GetColumnsOperation(parentSession, + catalogName, schemaName, tableName, columnName); + addOperation(operation); + return operation; + } + + public GetFunctionsOperation newGetFunctionsOperation(HiveSession parentSession, + String catalogName, String schemaName, String functionName) { + GetFunctionsOperation operation = new GetFunctionsOperation(parentSession, + catalogName, schemaName, functionName); + addOperation(operation); + return operation; + } + + public synchronized Operation getOperation(OperationHandle operationHandle) throws HiveSQLException { + Operation operation = handleToOperation.get(operationHandle); + if (operation == null) { + throw new HiveSQLException("Invalid OperationHandle: " + operationHandle); + } + return operation; + } + + private synchronized void addOperation(Operation operation) { + handleToOperation.put(operation.getHandle(), operation); + } + + private synchronized Operation removeOperation(OperationHandle opHandle) { + return 
handleToOperation.remove(opHandle); + } + + public OperationStatus getOperationStatus(OperationHandle opHandle) throws HiveSQLException { + return getOperation(opHandle).getStatus(); + } + + public void cancelOperation(OperationHandle opHandle) throws HiveSQLException { + getOperation(opHandle).cancel(); + } + + public void closeOperation(OperationHandle opHandle) throws HiveSQLException { + Operation operation = removeOperation(opHandle); + if (operation == null) { + throw new HiveSQLException("Operation does not exist!"); + } + operation.close(); + } + + public TableSchema getOperationResultSetSchema(OperationHandle opHandle) + throws HiveSQLException { + return getOperation(opHandle).getResultSetSchema(); + } + + public RowSet getOperationNextRowSet(OperationHandle opHandle) throws HiveSQLException { + return getOperation(opHandle).getNextRowSet(); + } + + public RowSet getOperationNextRowSet(OperationHandle opHandle, + FetchOrientation orientation, long maxRows) + throws HiveSQLException { + return getOperation(opHandle).getNextRowSet(orientation, maxRows); + } } diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java index 4ee1b74..1a68f44 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java @@ -51,6 +51,7 @@ import org.apache.hive.service.cli.FetchOrientation; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.OperationState; +import org.apache.hive.service.cli.OperationStatus; import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.TableSchema; import org.apache.hive.service.cli.session.HiveSession; @@ -61,289 +62,275 @@ */ public class SQLOperation extends ExecuteStatementOperation { - private Driver driver = null; - private CommandProcessorResponse response; - private TableSchema 
resultSchema = null; - private Schema mResultSchema = null; - private SerDe serde = null; - private final boolean runAsync; - private Future backgroundHandle; - - public SQLOperation(HiveSession parentSession, String statement, Map confOverlay, boolean runInBackground) { - // TODO: call setRemoteUser in ExecuteStatementOperation or higher. - super(parentSession, statement, confOverlay); - this.runAsync = runInBackground; - } - - - public void prepare() throws HiveSQLException { - } - - private void runInternal(HiveConf sqlOperationConf) throws HiveSQLException { - setState(OperationState.RUNNING); - String statement_trimmed = statement.trim(); - String[] tokens = statement_trimmed.split("\\s"); - String cmd_1 = statement_trimmed.substring(tokens[0].length()).trim(); - - int ret = 0; - String errorMessage = ""; - String SQLState = null; - - try { - driver = new Driver(sqlOperationConf, getParentSession().getUserName()); - // In Hive server mode, we are not able to retry in the FetchTask - // case, when calling fetch queries since execute() has returned. - // For now, we disable the test attempts. 
- driver.setTryCount(Integer.MAX_VALUE); - - String subStatement = new VariableSubstitution().substitute(sqlOperationConf, statement); - - response = driver.run(subStatement); - if (0 != response.getResponseCode()) { - throw new HiveSQLException("Error while processing statement: " - + response.getErrorMessage(), response.getSQLState(), response.getResponseCode()); - } - - mResultSchema = driver.getSchema(); - - // hasResultSet should be true only if the query has a FetchTask - // "explain" is an exception for now - if(driver.getPlan().getFetchTask() != null) { - //Schema has to be set - if (mResultSchema == null || !mResultSchema.isSetFieldSchemas()) { - throw new HiveSQLException("Error running query: Schema and FieldSchema " + - "should be set when query plan has a FetchTask"); - } - resultSchema = new TableSchema(mResultSchema); - setHasResultSet(true); - } else { - setHasResultSet(false); - } - // Set hasResultSet true if the plan has ExplainTask - // TODO explain should use a FetchTask for reading - for (Task task: driver.getPlan().getRootTasks()) { - if (task.getClass() == ExplainTask.class) { - resultSchema = new TableSchema(mResultSchema); - setHasResultSet(true); - break; - } - } - } catch (HiveSQLException e) { - setState(OperationState.ERROR); - throw e; - } catch (Exception e) { - setState(OperationState.ERROR); - throw new HiveSQLException("Error running query: " + e.toString(), e); - } - setState(OperationState.FINISHED); - } - - @Override - public void run() throws HiveSQLException { - setState(OperationState.PENDING); - if (!shouldRunAsync()) { - runInternal(getConfigForOperation()); - } else { - Runnable backgroundOperation = new Runnable() { - SessionState ss = SessionState.get(); - @Override - public void run() { - SessionState.setCurrentSessionState(ss); - try { - runInternal(getConfigForOperation()); - } catch (HiveSQLException e) { - LOG.error("Error: ", e); - // TODO: Return a more detailed error to the client, - // currently the async 
thread only writes to the log and sets the OperationState - } - } - }; - try { - // This submit blocks if no background threads are available to run this operation - backgroundHandle = - getParentSession().getSessionManager().submitBackgroundOperation(backgroundOperation); - } catch (RejectedExecutionException rejected) { - setState(OperationState.ERROR); - throw new HiveSQLException(rejected); - } - } - } - - private void cleanup(OperationState state) throws HiveSQLException { - setState(state); - if (shouldRunAsync()) { - if (backgroundHandle != null) { - backgroundHandle.cancel(true); - } - } - if (driver != null) { - driver.close(); - driver.destroy(); - } - - SessionState ss = SessionState.get(); - if (ss.getTmpOutputFile() != null) { - ss.getTmpOutputFile().delete(); - } - } - - @Override - public void cancel() throws HiveSQLException { - cleanup(OperationState.CANCELED); - } - - @Override - public void close() throws HiveSQLException { - cleanup(OperationState.CLOSED); - } - - @Override - public TableSchema getResultSetSchema() throws HiveSQLException { - assertState(OperationState.FINISHED); - if (resultSchema == null) { - resultSchema = new TableSchema(driver.getSchema()); - } - return resultSchema; - } - - - @Override - public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException { - assertState(OperationState.FINISHED); - ArrayList rows = new ArrayList(); - driver.setMaxRows((int)maxRows); - - try { - driver.getResults(rows); - - getSerDe(); - StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector(); - List fieldRefs = soi.getAllStructFieldRefs(); - RowSet rowSet = new RowSet(); - - Object[] deserializedFields = new Object[fieldRefs.size()]; - Object rowObj; - ObjectInspector fieldOI; - - for (String rowString : rows) { - rowObj = serde.deserialize(new BytesWritable(rowString.getBytes())); - for (int i = 0; i < fieldRefs.size(); i++) { - StructField fieldRef = fieldRefs.get(i); - fieldOI = 
fieldRef.getFieldObjectInspector(); - deserializedFields[i] = convertLazyToJava(soi.getStructFieldData(rowObj, fieldRef), fieldOI); - } - rowSet.addRow(resultSchema, deserializedFields); - } - return rowSet; - } catch (IOException e) { - throw new HiveSQLException(e); - } catch (CommandNeedRetryException e) { - throw new HiveSQLException(e); - } catch (Exception e) { - throw new HiveSQLException(e); - } - } - - /** - * Convert a LazyObject to a standard Java object in compliance with JDBC 3.0 (see JDBC 3.0 - * Specification, Table B-3: Mapping from JDBC Types to Java Object Types). - * - * This method is kept consistent with {@link HiveResultSetMetaData#hiveTypeToSqlType}. - */ - private static Object convertLazyToJava(Object o, ObjectInspector oi) { - Object obj = ObjectInspectorUtils.copyToStandardObject(o, oi, ObjectInspectorCopyOption.JAVA); - - if (obj == null) { - return null; - } - if(oi.getTypeName().equals(serdeConstants.BINARY_TYPE_NAME)) { - return new String((byte[])obj); - } - // for now, expose non-primitive as a string - // TODO: expose non-primitive as a structured object while maintaining JDBC compliance - if (oi.getCategory() != ObjectInspector.Category.PRIMITIVE) { - return SerDeUtils.getJSONString(o, oi); - } - return obj; - } - - - private SerDe getSerDe() throws SQLException { - if (serde != null) { - return serde; - } - try { - List fieldSchemas = mResultSchema.getFieldSchemas(); - List columnNames = new ArrayList(); - List columnTypes = new ArrayList(); - StringBuilder namesSb = new StringBuilder(); - StringBuilder typesSb = new StringBuilder(); - - if (fieldSchemas != null && !fieldSchemas.isEmpty()) { - for (int pos = 0; pos < fieldSchemas.size(); pos++) { - if (pos != 0) { - namesSb.append(","); - typesSb.append(","); - } - columnNames.add(fieldSchemas.get(pos).getName()); - columnTypes.add(fieldSchemas.get(pos).getType()); - namesSb.append(fieldSchemas.get(pos).getName()); - typesSb.append(fieldSchemas.get(pos).getType()); - } - } - 
String names = namesSb.toString(); - String types = typesSb.toString(); - - serde = new LazySimpleSerDe(); - Properties props = new Properties(); - if (names.length() > 0) { - LOG.debug("Column names: " + names); - props.setProperty(serdeConstants.LIST_COLUMNS, names); - } - if (types.length() > 0) { - LOG.debug("Column types: " + types); - props.setProperty(serdeConstants.LIST_COLUMN_TYPES, types); - } - serde.initialize(new HiveConf(), props); - - } catch (Exception ex) { - ex.printStackTrace(); - throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex); - } - return serde; - } - - private boolean shouldRunAsync() { - return runAsync; - } - - /** - * If there are query specific settings to overlay, then create a copy of config - * There are two cases we need to clone the session config that's being passed to hive driver - * 1. Async query - - * If the client changes a config setting, that shouldn't reflect in the execution already underway - * 2. confOverlay - - * The query specific settings should only be applied to the query config and not session - * @return new configuration - * @throws HiveSQLException - */ - private HiveConf getConfigForOperation() throws HiveSQLException { - HiveConf sqlOperationConf = getParentSession().getHiveConf(); - if (!getConfOverlay().isEmpty() || shouldRunAsync()) { - // clone the partent session config for this query - sqlOperationConf = new HiveConf(sqlOperationConf); - - // apply overlay query specific settings, if any - for (Map.Entry confEntry : getConfOverlay().entrySet()) { - try { - sqlOperationConf.verifyAndSet(confEntry.getKey(), confEntry.getValue()); - } catch (IllegalArgumentException e) { - throw new HiveSQLException("Error applying statement specific settings", e); - } - } - } - return sqlOperationConf; - } + private Driver driver = null; + private CommandProcessorResponse response; + private TableSchema resultSchema = null; + private Schema mResultSchema = null; + private SerDe serde = null; + 
private final boolean runAsync; + private volatile Future backgroundHandle; + + public SQLOperation(HiveSession parentSession, String statement, Map confOverlay, boolean runInBackground) { + // TODO: call setRemoteUser in ExecuteStatementOperation or higher. + super(parentSession, statement, confOverlay); + this.runAsync = runInBackground; + } + + public void prepare() throws HiveSQLException { + } + + private void runInternal(HiveConf sqlOperationConf) throws HiveSQLException { + setState(OperationState.RUNNING); + try { + driver = new Driver(sqlOperationConf, getParentSession().getUserName()); + // In Hive server mode, we are not able to retry in the FetchTask + // case, when calling fetch queries since execute() has returned. + // For now, we disable the test attempts. + driver.setTryCount(Integer.MAX_VALUE); + + String subStatement = new VariableSubstitution().substitute(sqlOperationConf, statement); + + response = driver.run(subStatement); + if (0 != response.getResponseCode()) { + throw new HiveSQLException("Error while processing statement: " + + response.getErrorMessage(), response.getSQLState(), response.getResponseCode()); + } + + mResultSchema = driver.getSchema(); + + // hasResultSet should be true only if the query has a FetchTask + // "explain" is an exception for now + if(driver.getPlan().getFetchTask() != null) { + //Schema has to be set + if (mResultSchema == null || !mResultSchema.isSetFieldSchemas()) { + throw new HiveSQLException("Error running query: Schema and FieldSchema " + + "should be set when query plan has a FetchTask"); + } + resultSchema = new TableSchema(mResultSchema); + setHasResultSet(true); + } else { + setHasResultSet(false); + } + // Set hasResultSet true if the plan has ExplainTask + // TODO explain should use a FetchTask for reading + for (Task task: driver.getPlan().getRootTasks()) { + if (task.getClass() == ExplainTask.class) { + resultSchema = new TableSchema(mResultSchema); + setHasResultSet(true); + break; + } + } + } 
catch (HiveSQLException e) { + setState(OperationState.ERROR); + throw e; + } catch (Exception e) { + setState(OperationState.ERROR); + throw new HiveSQLException("Error running query: " + e.toString(), e); + } + setState(OperationState.FINISHED); + } + + @Override + public void run() throws HiveSQLException { + setState(OperationState.PENDING); + if (!runAsync) { + runInternal(getConfigForOperation()); + } else { + Runnable backgroundOperation = new Runnable() { + SessionState ss = SessionState.get(); + @Override + public void run() { + SessionState.setCurrentSessionState(ss); + try { + runInternal(getConfigForOperation()); + } catch (HiveSQLException e) { + setOperationException(e); + LOG.error("Error: ", e); + } + } + }; + try { + // This submit blocks if no background threads are available to run this operation + backgroundHandle = + getParentSession().getSessionManager().submitBackgroundOperation(backgroundOperation); + } catch (RejectedExecutionException rejected) { + setState(OperationState.ERROR); + throw new HiveSQLException(rejected); + } + } + } + + private void cleanup(OperationState state) throws HiveSQLException { + setState(state); + if (runAsync) { + if (backgroundHandle != null) { + backgroundHandle.cancel(true); + } + } + if (driver != null) { + driver.close(); + driver.destroy(); + } + + SessionState ss = SessionState.get(); + if (ss.getTmpOutputFile() != null) { + ss.getTmpOutputFile().delete(); + } + } + + @Override + public void cancel() throws HiveSQLException { + cleanup(OperationState.CANCELED); + } + + @Override + public void close() throws HiveSQLException { + cleanup(OperationState.CLOSED); + } + + @Override + public TableSchema getResultSetSchema() throws HiveSQLException { + assertState(OperationState.FINISHED); + if (resultSchema == null) { + resultSchema = new TableSchema(driver.getSchema()); + } + return resultSchema; + } + + + @Override + public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws 
HiveSQLException { + assertState(OperationState.FINISHED); + ArrayList rows = new ArrayList(); + driver.setMaxRows((int)maxRows); + + try { + driver.getResults(rows); + + getSerDe(); + StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector(); + List fieldRefs = soi.getAllStructFieldRefs(); + RowSet rowSet = new RowSet(); + + Object[] deserializedFields = new Object[fieldRefs.size()]; + Object rowObj; + ObjectInspector fieldOI; + + for (String rowString : rows) { + rowObj = serde.deserialize(new BytesWritable(rowString.getBytes())); + for (int i = 0; i < fieldRefs.size(); i++) { + StructField fieldRef = fieldRefs.get(i); + fieldOI = fieldRef.getFieldObjectInspector(); + deserializedFields[i] = convertLazyToJava(soi.getStructFieldData(rowObj, fieldRef), fieldOI); + } + rowSet.addRow(resultSchema, deserializedFields); + } + return rowSet; + } catch (IOException e) { + throw new HiveSQLException(e); + } catch (CommandNeedRetryException e) { + throw new HiveSQLException(e); + } catch (Exception e) { + throw new HiveSQLException(e); + } + } + + /** + * Convert a LazyObject to a standard Java object in compliance with JDBC 3.0 (see JDBC 3.0 + * Specification, Table B-3: Mapping from JDBC Types to Java Object Types). + * + * This method is kept consistent with {@link HiveResultSetMetaData#hiveTypeToSqlType}. 
+ */ + private static Object convertLazyToJava(Object o, ObjectInspector oi) { + Object obj = ObjectInspectorUtils.copyToStandardObject(o, oi, ObjectInspectorCopyOption.JAVA); + + if (obj == null) { + return null; + } + if(oi.getTypeName().equals(serdeConstants.BINARY_TYPE_NAME)) { + return new String((byte[])obj); + } + // for now, expose non-primitive as a string + // TODO: expose non-primitive as a structured object while maintaining JDBC compliance + if (oi.getCategory() != ObjectInspector.Category.PRIMITIVE) { + return SerDeUtils.getJSONString(o, oi); + } + return obj; + } + + + private SerDe getSerDe() throws SQLException { + if (serde != null) { + return serde; + } + try { + List fieldSchemas = mResultSchema.getFieldSchemas(); + List columnNames = new ArrayList(); + List columnTypes = new ArrayList(); + StringBuilder namesSb = new StringBuilder(); + StringBuilder typesSb = new StringBuilder(); + + if (fieldSchemas != null && !fieldSchemas.isEmpty()) { + for (int pos = 0; pos < fieldSchemas.size(); pos++) { + if (pos != 0) { + namesSb.append(","); + typesSb.append(","); + } + columnNames.add(fieldSchemas.get(pos).getName()); + columnTypes.add(fieldSchemas.get(pos).getType()); + namesSb.append(fieldSchemas.get(pos).getName()); + typesSb.append(fieldSchemas.get(pos).getType()); + } + } + String names = namesSb.toString(); + String types = typesSb.toString(); + + serde = new LazySimpleSerDe(); + Properties props = new Properties(); + if (names.length() > 0) { + LOG.debug("Column names: " + names); + props.setProperty(serdeConstants.LIST_COLUMNS, names); + } + if (types.length() > 0) { + LOG.debug("Column types: " + types); + props.setProperty(serdeConstants.LIST_COLUMN_TYPES, types); + } + serde.initialize(new HiveConf(), props); + + } catch (Exception ex) { + ex.printStackTrace(); + throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex); + } + return serde; + } + + /** + * If there are query specific settings to overlay, then create a copy 
of config + * There are two cases we need to clone the session config that's being passed to hive driver + * 1. Async query - + * If the client changes a config setting, that shouldn't reflect in the execution already underway + * 2. confOverlay - + * The query specific settings should only be applied to the query config and not session + * @return new configuration + * @throws HiveSQLException + */ + private HiveConf getConfigForOperation() throws HiveSQLException { + HiveConf sqlOperationConf = getParentSession().getHiveConf(); + if (!getConfOverlay().isEmpty() || runAsync) { + // clone the parent session config for this query + sqlOperationConf = new HiveConf(sqlOperationConf); + + // apply overlay query specific settings, if any + for (Map.Entry confEntry : getConfOverlay().entrySet()) { + try { + sqlOperationConf.verifyAndSet(confEntry.getKey(), confEntry.getValue()); + } catch (IllegalArgumentException e) { + throw new HiveSQLException("Error applying statement specific settings", e); + } + } + } + return sqlOperationConf; + } } diff --git a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java index f392d62..7313dec 100644 --- a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java +++ b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java @@ -41,151 +41,151 @@ * */ public class SessionManager extends CompositeService { - private static final Log LOG = LogFactory.getLog(CompositeService.class); - private HiveConf hiveConf; - private final Map handleToSession = new HashMap(); - private OperationManager operationManager = new OperationManager(); - private static final Object sessionMapLock = new Object(); - private ExecutorService backgroundOperationPool; - - public SessionManager() { - super("SessionManager"); - } - - @Override - public synchronized void init(HiveConf hiveConf) { - this.hiveConf = hiveConf; - operationManager = 
new OperationManager(); - int backgroundPoolSize = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_ASYNC_EXEC_THREADS); - LOG.info("HiveServer2: Async execution pool size" + backgroundPoolSize); - backgroundOperationPool = Executors.newFixedThreadPool(backgroundPoolSize); - addService(operationManager); - super.init(hiveConf); - } - - @Override - public synchronized void start() { - super.start(); - // TODO - } - - @Override - public synchronized void stop() { - // TODO - super.stop(); - if (backgroundOperationPool != null) { - backgroundOperationPool.shutdown(); - long timeout = hiveConf.getLongVar(ConfVars.HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT); - try { - backgroundOperationPool.awaitTermination(timeout, TimeUnit.SECONDS); - } catch (InterruptedException exc) { - LOG.warn("HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT = " + timeout + - " seconds has been exceeded. RUNNING background operations will be shut down", exc); - } - } - } - - - public SessionHandle openSession(String username, String password, Map sessionConf) - throws HiveSQLException { - return openSession(username, password, sessionConf, false, null); - } - - public SessionHandle openSession(String username, String password, Map sessionConf, - boolean withImpersonation, String delegationToken) throws HiveSQLException { - if (username == null) { - username = threadLocalUserName.get(); - } - HiveSession session; - if (withImpersonation) { - HiveSessionImplwithUGI hiveSessionUgi = new HiveSessionImplwithUGI(username, password, - sessionConf, delegationToken); - session = HiveSessionProxy.getProxy(hiveSessionUgi, hiveSessionUgi.getSessionUgi()); - hiveSessionUgi.setProxySession(session); - } else { - session = new HiveSessionImpl(username, password, sessionConf); - } - session.setSessionManager(this); - session.setOperationManager(operationManager); - synchronized(sessionMapLock) { - handleToSession.put(session.getSessionHandle(), session); - } - try { - executeSessionHooks(session); - } catch (Exception e) { - 
throw new HiveSQLException("Failed to execute session hooks", e); - } - return session.getSessionHandle(); - } - - public void closeSession(SessionHandle sessionHandle) throws HiveSQLException { - HiveSession session; - synchronized(sessionMapLock) { - session = handleToSession.remove(sessionHandle); - } - if (session == null) { - throw new HiveSQLException("Session does not exist!"); - } - session.close(); - } - - - public HiveSession getSession(SessionHandle sessionHandle) throws HiveSQLException { - HiveSession session; - synchronized(sessionMapLock) { - session = handleToSession.get(sessionHandle); - } - if (session == null) { - throw new HiveSQLException("Invalid SessionHandle: " + sessionHandle); - } - return session; - } - - public OperationManager getOperationManager() { - return operationManager; - } - - private static ThreadLocal threadLocalIpAddress = new ThreadLocal() { - @Override - protected synchronized String initialValue() { - return null; - } - }; - - public static void setIpAddress(String ipAddress) { - threadLocalIpAddress.set(ipAddress); - } - - private void clearIpAddress() { - threadLocalIpAddress.remove(); - } - - private static ThreadLocal threadLocalUserName = new ThreadLocal(){ - @Override - protected synchronized String initialValue() { - return null; - } - }; - - public static void setUserName(String userName) { - threadLocalUserName.set(userName); - } - - private void clearUserName() { - threadLocalUserName.remove(); - } - - // execute session hooks - private void executeSessionHooks(HiveSession session) throws Exception { - List sessionHooks = HookUtils.getHooks(hiveConf, - HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK, HiveSessionHook.class); - for (HiveSessionHook sessionHook : sessionHooks) { - sessionHook.run(new HiveSessionHookContextImpl(session)); - } - } - - public Future submitBackgroundOperation(Runnable r) { - return backgroundOperationPool.submit(r); - } + private static final Log LOG = 
LogFactory.getLog(CompositeService.class); + private HiveConf hiveConf; + private final Map handleToSession = new HashMap(); + private OperationManager operationManager = new OperationManager(); + private static final Object sessionMapLock = new Object(); + private ExecutorService backgroundOperationPool; + + public SessionManager() { + super("SessionManager"); + } + + @Override + public synchronized void init(HiveConf hiveConf) { + this.hiveConf = hiveConf; + operationManager = new OperationManager(); + int backgroundPoolSize = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_ASYNC_EXEC_THREADS); + LOG.info("HiveServer2: Async execution pool size" + backgroundPoolSize); + backgroundOperationPool = Executors.newFixedThreadPool(backgroundPoolSize); + addService(operationManager); + super.init(hiveConf); + } + + @Override + public synchronized void start() { + super.start(); + // TODO + } + + @Override + public synchronized void stop() { + // TODO + super.stop(); + if (backgroundOperationPool != null) { + backgroundOperationPool.shutdown(); + long timeout = hiveConf.getLongVar(ConfVars.HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT); + try { + backgroundOperationPool.awaitTermination(timeout, TimeUnit.SECONDS); + } catch (InterruptedException exc) { + LOG.warn("HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT = " + timeout + + " seconds has been exceeded. 
RUNNING background operations will be shut down", exc); + } + } + } + + + public SessionHandle openSession(String username, String password, Map sessionConf) + throws HiveSQLException { + return openSession(username, password, sessionConf, false, null); + } + + public SessionHandle openSession(String username, String password, Map sessionConf, + boolean withImpersonation, String delegationToken) throws HiveSQLException { + if (username == null) { + username = threadLocalUserName.get(); + } + HiveSession session; + if (withImpersonation) { + HiveSessionImplwithUGI hiveSessionUgi = new HiveSessionImplwithUGI(username, password, + sessionConf, delegationToken); + session = HiveSessionProxy.getProxy(hiveSessionUgi, hiveSessionUgi.getSessionUgi()); + hiveSessionUgi.setProxySession(session); + } else { + session = new HiveSessionImpl(username, password, sessionConf); + } + session.setSessionManager(this); + session.setOperationManager(operationManager); + synchronized(sessionMapLock) { + handleToSession.put(session.getSessionHandle(), session); + } + try { + executeSessionHooks(session); + } catch (Exception e) { + throw new HiveSQLException("Failed to execute session hooks", e); + } + return session.getSessionHandle(); + } + + public void closeSession(SessionHandle sessionHandle) throws HiveSQLException { + HiveSession session; + synchronized(sessionMapLock) { + session = handleToSession.remove(sessionHandle); + } + if (session == null) { + throw new HiveSQLException("Session does not exist!"); + } + session.close(); + } + + + public HiveSession getSession(SessionHandle sessionHandle) throws HiveSQLException { + HiveSession session; + synchronized(sessionMapLock) { + session = handleToSession.get(sessionHandle); + } + if (session == null) { + throw new HiveSQLException("Invalid SessionHandle: " + sessionHandle); + } + return session; + } + + public OperationManager getOperationManager() { + return operationManager; + } + + private static ThreadLocal threadLocalIpAddress 
= new ThreadLocal() { + @Override + protected synchronized String initialValue() { + return null; + } + }; + + public static void setIpAddress(String ipAddress) { + threadLocalIpAddress.set(ipAddress); + } + + private void clearIpAddress() { + threadLocalIpAddress.remove(); + } + + private static ThreadLocal threadLocalUserName = new ThreadLocal(){ + @Override + protected synchronized String initialValue() { + return null; + } + }; + + public static void setUserName(String userName) { + threadLocalUserName.set(userName); + } + + private void clearUserName() { + threadLocalUserName.remove(); + } + + // execute session hooks + private void executeSessionHooks(HiveSession session) throws Exception { + List sessionHooks = HookUtils.getHooks(hiveConf, + HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK, HiveSessionHook.class); + for (HiveSessionHook sessionHook : sessionHooks) { + sessionHook.run(new HiveSessionHookContextImpl(session)); + } + } + + public Future submitBackgroundOperation(Runnable r) { + return backgroundOperationPool.submit(r); + } } diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java index 9df110e..8e176ca 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java @@ -37,7 +37,7 @@ import org.apache.hive.service.cli.GetInfoValue; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.OperationHandle; -import org.apache.hive.service.cli.OperationState; +import org.apache.hive.service.cli.OperationStatus; import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.SessionHandle; import org.apache.hive.service.cli.TableSchema; @@ -50,353 +50,361 @@ */ public abstract class ThriftCLIService extends AbstractService implements TCLIService.Iface, Runnable { - public static final Log LOG = 
LogFactory.getLog(ThriftCLIService.class.getName()); - - protected CLIService cliService; - private static final TStatus OK_STATUS = new TStatus(TStatusCode.SUCCESS_STATUS); - private static final TStatus ERROR_STATUS = new TStatus(TStatusCode.ERROR_STATUS); - - protected int portNum; - protected InetSocketAddress serverAddress; - protected TServer server; - protected org.mortbay.jetty.Server httpServer; - - private boolean isStarted = false; - protected boolean isEmbedded = false; - - protected HiveConf hiveConf; - - protected int minWorkerThreads; - protected int maxWorkerThreads; - - protected static HiveAuthFactory hiveAuthFactory; - - public ThriftCLIService(CLIService cliService, String serviceName) { - super(serviceName); - this.cliService = cliService; - } - - @Override - public synchronized void init(HiveConf hiveConf) { - this.hiveConf = hiveConf; - super.init(hiveConf); - } - - @Override - public synchronized void start() { - super.start(); - if (!isStarted && !isEmbedded) { - new Thread(this).start(); - isStarted = true; - } - } - - @Override - public synchronized void stop() { - if (isStarted && !isEmbedded) { - if(server != null) { - server.stop(); - LOG.info("Thrift server has stopped"); - } - if((httpServer != null) && httpServer.isStarted()) { - try { - httpServer.stop(); - LOG.info("Http server has stopped"); - } catch (Exception e) { - LOG.error("Error stopping Http server: ", e); - } - } - isStarted = false; - } - super.stop(); - } - - - @Override - public TOpenSessionResp OpenSession(TOpenSessionReq req) throws TException { - TOpenSessionResp resp = new TOpenSessionResp(); - try { - SessionHandle sessionHandle = getSessionHandle(req); - resp.setSessionHandle(sessionHandle.toTSessionHandle()); - // TODO: set real configuration map - resp.setConfiguration(new HashMap()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error opening session: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - 
private String getUserName(TOpenSessionReq req) { - if (hiveAuthFactory != null - && hiveAuthFactory.getRemoteUser() != null) { - return hiveAuthFactory.getRemoteUser(); - } else { - return req.getUsername(); - } - } - - SessionHandle getSessionHandle(TOpenSessionReq req) - throws HiveSQLException, LoginException, IOException { - - String userName = getUserName(req); - - SessionHandle sessionHandle = null; - if ( - cliService.getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION) - .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString()) - && - cliService.getHiveConf(). - getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) - ) - { - String delegationTokenStr = null; - try { - delegationTokenStr = cliService.getDelegationTokenFromMetaStore(userName); - } catch (UnsupportedOperationException e) { - // The delegation token is not applicable in the given deployment mode - } - sessionHandle = cliService.openSessionWithImpersonation(userName, req.getPassword(), - req.getConfiguration(), delegationTokenStr); - } else { - sessionHandle = cliService.openSession(userName, req.getPassword(), - req.getConfiguration()); - } - return sessionHandle; - } - - @Override - public TCloseSessionResp CloseSession(TCloseSessionReq req) throws TException { - TCloseSessionResp resp = new TCloseSessionResp(); - try { - SessionHandle sessionHandle = new SessionHandle(req.getSessionHandle()); - cliService.closeSession(sessionHandle); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error closing session: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TGetInfoResp GetInfo(TGetInfoReq req) throws TException { - TGetInfoResp resp = new TGetInfoResp(); - try { - GetInfoValue getInfoValue = - cliService.getInfo(new SessionHandle(req.getSessionHandle()), - GetInfoType.getGetInfoType(req.getInfoType())); - resp.setInfoValue(getInfoValue.toTGetInfoValue()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error 
getting info: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TExecuteStatementResp ExecuteStatement(TExecuteStatementReq req) throws TException { - TExecuteStatementResp resp = new TExecuteStatementResp(); - try { - SessionHandle sessionHandle = new SessionHandle(req.getSessionHandle()); - String statement = req.getStatement(); - Map confOverlay = req.getConfOverlay(); - Boolean runAsync = req.isRunAsync(); - OperationHandle operationHandle = runAsync ? - cliService.executeStatementAsync(sessionHandle, statement, confOverlay) - : cliService.executeStatement(sessionHandle, statement, confOverlay); - resp.setOperationHandle(operationHandle.toTOperationHandle()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error executing statement: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TGetTypeInfoResp GetTypeInfo(TGetTypeInfoReq req) throws TException { - TGetTypeInfoResp resp = new TGetTypeInfoResp(); - try { - OperationHandle operationHandle = cliService.getTypeInfo(new SessionHandle(req.getSessionHandle())); - resp.setOperationHandle(operationHandle.toTOperationHandle()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error getting type info: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TGetCatalogsResp GetCatalogs(TGetCatalogsReq req) throws TException { - TGetCatalogsResp resp = new TGetCatalogsResp(); - try { - OperationHandle opHandle = cliService.getCatalogs(new SessionHandle(req.getSessionHandle())); - resp.setOperationHandle(opHandle.toTOperationHandle()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error getting catalogs: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TGetSchemasResp GetSchemas(TGetSchemasReq req) throws TException { - TGetSchemasResp resp = new TGetSchemasResp(); 
- try { - OperationHandle opHandle = cliService.getSchemas( - new SessionHandle(req.getSessionHandle()), req.getCatalogName(), req.getSchemaName()); - resp.setOperationHandle(opHandle.toTOperationHandle()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error getting schemas: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TGetTablesResp GetTables(TGetTablesReq req) throws TException { - TGetTablesResp resp = new TGetTablesResp(); - try { - OperationHandle opHandle = cliService - .getTables(new SessionHandle(req.getSessionHandle()), req.getCatalogName(), - req.getSchemaName(), req.getTableName(), req.getTableTypes()); - resp.setOperationHandle(opHandle.toTOperationHandle()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error getting tables: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TGetTableTypesResp GetTableTypes(TGetTableTypesReq req) throws TException { - TGetTableTypesResp resp = new TGetTableTypesResp(); - try { - OperationHandle opHandle = cliService.getTableTypes(new SessionHandle(req.getSessionHandle())); - resp.setOperationHandle(opHandle.toTOperationHandle()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error getting table types: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TGetColumnsResp GetColumns(TGetColumnsReq req) throws TException { - TGetColumnsResp resp = new TGetColumnsResp(); - try { - OperationHandle opHandle = cliService.getColumns( - new SessionHandle(req.getSessionHandle()), - req.getCatalogName(), - req.getSchemaName(), - req.getTableName(), - req.getColumnName()); - resp.setOperationHandle(opHandle.toTOperationHandle()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error getting columns: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - 
public TGetFunctionsResp GetFunctions(TGetFunctionsReq req) throws TException { - TGetFunctionsResp resp = new TGetFunctionsResp(); - try { - OperationHandle opHandle = cliService.getFunctions( - new SessionHandle(req.getSessionHandle()), req.getCatalogName(), - req.getSchemaName(), req.getFunctionName()); - resp.setOperationHandle(opHandle.toTOperationHandle()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error getting functions: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TGetOperationStatusResp GetOperationStatus(TGetOperationStatusReq req) throws TException { - TGetOperationStatusResp resp = new TGetOperationStatusResp(); - try { - OperationState operationState = cliService.getOperationStatus(new OperationHandle(req.getOperationHandle())); - resp.setOperationState(operationState.toTOperationState()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error getting operation status: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TCancelOperationResp CancelOperation(TCancelOperationReq req) throws TException { - TCancelOperationResp resp = new TCancelOperationResp(); - try { - cliService.cancelOperation(new OperationHandle(req.getOperationHandle())); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error cancelling operation: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TCloseOperationResp CloseOperation(TCloseOperationReq req) throws TException { - TCloseOperationResp resp = new TCloseOperationResp(); - try { - cliService.closeOperation(new OperationHandle(req.getOperationHandle())); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error closing operation: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TGetResultSetMetadataResp 
GetResultSetMetadata(TGetResultSetMetadataReq req) - throws TException { - TGetResultSetMetadataResp resp = new TGetResultSetMetadataResp(); - try { - TableSchema schema = cliService.getResultSetMetadata(new OperationHandle(req.getOperationHandle())); - resp.setSchema(schema.toTTableSchema()); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error getting result set metadata: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public TFetchResultsResp FetchResults(TFetchResultsReq req) throws TException { - TFetchResultsResp resp = new TFetchResultsResp(); - try { - RowSet rowSet = cliService.fetchResults( - new OperationHandle(req.getOperationHandle()), - FetchOrientation.getFetchOrientation(req.getOrientation()), - req.getMaxRows()); - resp.setResults(rowSet.toTRowSet()); - resp.setHasMoreRows(false); - resp.setStatus(OK_STATUS); - } catch (Exception e) { - LOG.warn("Error fetching results: ", e); - resp.setStatus(HiveSQLException.toTStatus(e)); - } - return resp; - } - - @Override - public abstract void run(); + public static final Log LOG = LogFactory.getLog(ThriftCLIService.class.getName()); + + protected CLIService cliService; + private static final TStatus OK_STATUS = new TStatus(TStatusCode.SUCCESS_STATUS); + private static final TStatus ERROR_STATUS = new TStatus(TStatusCode.ERROR_STATUS); + + protected int portNum; + protected InetSocketAddress serverAddress; + protected TServer server; + protected org.mortbay.jetty.Server httpServer; + + private boolean isStarted = false; + protected boolean isEmbedded = false; + + protected HiveConf hiveConf; + + protected int minWorkerThreads; + protected int maxWorkerThreads; + + protected static HiveAuthFactory hiveAuthFactory; + + public ThriftCLIService(CLIService cliService, String serviceName) { + super(serviceName); + this.cliService = cliService; + } + + @Override + public synchronized void init(HiveConf hiveConf) { + this.hiveConf = hiveConf; + 
super.init(hiveConf); + } + + @Override + public synchronized void start() { + super.start(); + if (!isStarted && !isEmbedded) { + new Thread(this).start(); + isStarted = true; + } + } + + @Override + public synchronized void stop() { + if (isStarted && !isEmbedded) { + if(server != null) { + server.stop(); + LOG.info("Thrift server has stopped"); + } + if((httpServer != null) && httpServer.isStarted()) { + try { + httpServer.stop(); + LOG.info("Http server has stopped"); + } catch (Exception e) { + LOG.error("Error stopping Http server: ", e); + } + } + isStarted = false; + } + super.stop(); + } + + + @Override + public TOpenSessionResp OpenSession(TOpenSessionReq req) throws TException { + LOG.info("Client protocol version: " + req.getClient_protocol()); + TOpenSessionResp resp = new TOpenSessionResp(); + try { + SessionHandle sessionHandle = getSessionHandle(req); + resp.setSessionHandle(sessionHandle.toTSessionHandle()); + // TODO: set real configuration map + resp.setConfiguration(new HashMap()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error opening session: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + private String getUserName(TOpenSessionReq req) { + if (hiveAuthFactory != null + && hiveAuthFactory.getRemoteUser() != null) { + return hiveAuthFactory.getRemoteUser(); + } else { + return req.getUsername(); + } + } + + SessionHandle getSessionHandle(TOpenSessionReq req) + throws HiveSQLException, LoginException, IOException { + + String userName = getUserName(req); + + SessionHandle sessionHandle = null; + if ( + cliService.getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION) + .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString()) + && + cliService.getHiveConf(). 
+ getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) + ) + { + String delegationTokenStr = null; + try { + delegationTokenStr = cliService.getDelegationTokenFromMetaStore(userName); + } catch (UnsupportedOperationException e) { + // The delegation token is not applicable in the given deployment mode + } + sessionHandle = cliService.openSessionWithImpersonation(userName, req.getPassword(), + req.getConfiguration(), delegationTokenStr); + } else { + sessionHandle = cliService.openSession(userName, req.getPassword(), + req.getConfiguration()); + } + return sessionHandle; + } + + @Override + public TCloseSessionResp CloseSession(TCloseSessionReq req) throws TException { + TCloseSessionResp resp = new TCloseSessionResp(); + try { + SessionHandle sessionHandle = new SessionHandle(req.getSessionHandle()); + cliService.closeSession(sessionHandle); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error closing session: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TGetInfoResp GetInfo(TGetInfoReq req) throws TException { + TGetInfoResp resp = new TGetInfoResp(); + try { + GetInfoValue getInfoValue = + cliService.getInfo(new SessionHandle(req.getSessionHandle()), + GetInfoType.getGetInfoType(req.getInfoType())); + resp.setInfoValue(getInfoValue.toTGetInfoValue()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error getting info: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TExecuteStatementResp ExecuteStatement(TExecuteStatementReq req) throws TException { + TExecuteStatementResp resp = new TExecuteStatementResp(); + try { + SessionHandle sessionHandle = new SessionHandle(req.getSessionHandle()); + String statement = req.getStatement(); + Map confOverlay = req.getConfOverlay(); + Boolean runAsync = req.isRunAsync(); + OperationHandle operationHandle = runAsync ? 
+ cliService.executeStatementAsync(sessionHandle, statement, confOverlay) + : cliService.executeStatement(sessionHandle, statement, confOverlay); + resp.setOperationHandle(operationHandle.toTOperationHandle()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error executing statement: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TGetTypeInfoResp GetTypeInfo(TGetTypeInfoReq req) throws TException { + TGetTypeInfoResp resp = new TGetTypeInfoResp(); + try { + OperationHandle operationHandle = cliService.getTypeInfo(new SessionHandle(req.getSessionHandle())); + resp.setOperationHandle(operationHandle.toTOperationHandle()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error getting type info: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TGetCatalogsResp GetCatalogs(TGetCatalogsReq req) throws TException { + TGetCatalogsResp resp = new TGetCatalogsResp(); + try { + OperationHandle opHandle = cliService.getCatalogs(new SessionHandle(req.getSessionHandle())); + resp.setOperationHandle(opHandle.toTOperationHandle()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error getting catalogs: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TGetSchemasResp GetSchemas(TGetSchemasReq req) throws TException { + TGetSchemasResp resp = new TGetSchemasResp(); + try { + OperationHandle opHandle = cliService.getSchemas( + new SessionHandle(req.getSessionHandle()), req.getCatalogName(), req.getSchemaName()); + resp.setOperationHandle(opHandle.toTOperationHandle()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error getting schemas: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TGetTablesResp GetTables(TGetTablesReq req) throws TException { + TGetTablesResp resp = new TGetTablesResp(); + 
try { + OperationHandle opHandle = cliService + .getTables(new SessionHandle(req.getSessionHandle()), req.getCatalogName(), + req.getSchemaName(), req.getTableName(), req.getTableTypes()); + resp.setOperationHandle(opHandle.toTOperationHandle()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error getting tables: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TGetTableTypesResp GetTableTypes(TGetTableTypesReq req) throws TException { + TGetTableTypesResp resp = new TGetTableTypesResp(); + try { + OperationHandle opHandle = cliService.getTableTypes(new SessionHandle(req.getSessionHandle())); + resp.setOperationHandle(opHandle.toTOperationHandle()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error getting table types: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TGetColumnsResp GetColumns(TGetColumnsReq req) throws TException { + TGetColumnsResp resp = new TGetColumnsResp(); + try { + OperationHandle opHandle = cliService.getColumns( + new SessionHandle(req.getSessionHandle()), + req.getCatalogName(), + req.getSchemaName(), + req.getTableName(), + req.getColumnName()); + resp.setOperationHandle(opHandle.toTOperationHandle()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error getting columns: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TGetFunctionsResp GetFunctions(TGetFunctionsReq req) throws TException { + TGetFunctionsResp resp = new TGetFunctionsResp(); + try { + OperationHandle opHandle = cliService.getFunctions( + new SessionHandle(req.getSessionHandle()), req.getCatalogName(), + req.getSchemaName(), req.getFunctionName()); + resp.setOperationHandle(opHandle.toTOperationHandle()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error getting functions: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + 
} + return resp; + } + + @Override + public TGetOperationStatusResp GetOperationStatus(TGetOperationStatusReq req) throws TException { + TGetOperationStatusResp resp = new TGetOperationStatusResp(); + try { + OperationStatus operationStatus = cliService.getOperationStatus( + new OperationHandle(req.getOperationHandle())); + resp.setOperationState(operationStatus.getState().toTOperationState()); + HiveSQLException opException = operationStatus.getOperationException(); + if (opException != null) { + resp.setSqlState(opException.getSQLState()); + resp.setErrorCode(opException.getErrorCode()); + resp.setErrorMessage(opException.getMessage()); + } + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error getting operation status: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TCancelOperationResp CancelOperation(TCancelOperationReq req) throws TException { + TCancelOperationResp resp = new TCancelOperationResp(); + try { + cliService.cancelOperation(new OperationHandle(req.getOperationHandle())); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error cancelling operation: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TCloseOperationResp CloseOperation(TCloseOperationReq req) throws TException { + TCloseOperationResp resp = new TCloseOperationResp(); + try { + cliService.closeOperation(new OperationHandle(req.getOperationHandle())); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error closing operation: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TGetResultSetMetadataResp GetResultSetMetadata(TGetResultSetMetadataReq req) + throws TException { + TGetResultSetMetadataResp resp = new TGetResultSetMetadataResp(); + try { + TableSchema schema = cliService.getResultSetMetadata(new OperationHandle(req.getOperationHandle())); + 
resp.setSchema(schema.toTTableSchema()); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error getting result set metadata: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public TFetchResultsResp FetchResults(TFetchResultsReq req) throws TException { + TFetchResultsResp resp = new TFetchResultsResp(); + try { + RowSet rowSet = cliService.fetchResults( + new OperationHandle(req.getOperationHandle()), + FetchOrientation.getFetchOrientation(req.getOrientation()), + req.getMaxRows()); + resp.setResults(rowSet.toTRowSet()); + resp.setHasMoreRows(false); + resp.setStatus(OK_STATUS); + } catch (Exception e) { + LOG.warn("Error fetching results: ", e); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + return resp; + } + + @Override + public abstract void run(); } diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java index 9bb2a0f..2db5953 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java @@ -28,6 +28,7 @@ import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.OperationHandle; import org.apache.hive.service.cli.OperationState; +import org.apache.hive.service.cli.OperationStatus; import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.SessionHandle; import org.apache.hive.service.cli.TableSchema; @@ -37,355 +38,361 @@ * */ public class ThriftCLIServiceClient extends CLIServiceClient { - private final TCLIService.Iface cliService; + private final TCLIService.Iface cliService; - public ThriftCLIServiceClient(TCLIService.Iface cliService) { - this.cliService = cliService; - } + public ThriftCLIServiceClient(TCLIService.Iface cliService) { + this.cliService = cliService; + } - public void checkStatus(TStatus 
status) throws HiveSQLException { - if (TStatusCode.ERROR_STATUS.equals(status.getStatusCode())) { - throw new HiveSQLException(status); - } - } + public void checkStatus(TStatus status) throws HiveSQLException { + if (TStatusCode.ERROR_STATUS.equals(status.getStatusCode())) { + throw new HiveSQLException(status); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map) - */ - @Override - public SessionHandle openSession(String username, String password, - Map configuration) - throws HiveSQLException { - try { - TOpenSessionReq req = new TOpenSessionReq(); - req.setUsername(username); - req.setPassword(password); - req.setConfiguration(configuration); - TOpenSessionResp resp = cliService.OpenSession(req); - checkStatus(resp.getStatus()); - return new SessionHandle(resp.getSessionHandle()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#openSession(java.lang.String, java.lang.String, java.util.Map) + */ + @Override + public SessionHandle openSession(String username, String password, + Map configuration) + throws HiveSQLException { + try { + TOpenSessionReq req = new TOpenSessionReq(); + req.setUsername(username); + req.setPassword(password); + req.setConfiguration(configuration); + TOpenSessionResp resp = cliService.OpenSession(req); + checkStatus(resp.getStatus()); + return new SessionHandle(resp.getSessionHandle()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public SessionHandle openSessionWithImpersonation(String username, String password, - Map configuration, String delegationToken) throws HiveSQLException { - throw new HiveSQLException("open 
with impersonation operation is not supported in the client"); - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public SessionHandle openSessionWithImpersonation(String username, String password, + Map configuration, String delegationToken) throws HiveSQLException { + throw new HiveSQLException("open with impersonation operation is not supported in the client"); + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public void closeSession(SessionHandle sessionHandle) throws HiveSQLException { - try { - TCloseSessionReq req = new TCloseSessionReq(sessionHandle.toTSessionHandle()); - TCloseSessionResp resp = cliService.CloseSession(req); - checkStatus(resp.getStatus()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public void closeSession(SessionHandle sessionHandle) throws HiveSQLException { + try { + TCloseSessionReq req = new TCloseSessionReq(sessionHandle.toTSessionHandle()); + TCloseSessionResp resp = cliService.CloseSession(req); + checkStatus(resp.getStatus()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getInfo(org.apache.hive.service.cli.SessionHandle, java.util.List) - */ - @Override - public GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType infoType) - throws HiveSQLException { - try { - // FIXME extract the right info type - TGetInfoReq req = new TGetInfoReq(sessionHandle.toTSessionHandle(), infoType.toTGetInfoType()); - TGetInfoResp resp = cliService.GetInfo(req); - checkStatus(resp.getStatus()); - return new 
GetInfoValue(resp.getInfoValue()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getInfo(org.apache.hive.service.cli.SessionHandle, java.util.List) + */ + @Override + public GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType infoType) + throws HiveSQLException { + try { + // FIXME extract the right info type + TGetInfoReq req = new TGetInfoReq(sessionHandle.toTSessionHandle(), infoType.toTGetInfoType()); + TGetInfoResp resp = cliService.GetInfo(req); + checkStatus(resp.getStatus()); + return new GetInfoValue(resp.getInfoValue()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#executeStatement(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.util.Map) - */ - @Override - public OperationHandle executeStatement(SessionHandle sessionHandle, String statement, - Map confOverlay) - throws HiveSQLException { - return executeStatementInternal(sessionHandle, statement, confOverlay, false); - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#executeStatement(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.util.Map) + */ + @Override + public OperationHandle executeStatement(SessionHandle sessionHandle, String statement, + Map confOverlay) + throws HiveSQLException { + return executeStatementInternal(sessionHandle, statement, confOverlay, false); + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#executeStatementAsync(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.util.Map) - */ - @Override - public OperationHandle executeStatementAsync(SessionHandle sessionHandle, String statement, - Map confOverlay) - throws HiveSQLException { - return executeStatementInternal(sessionHandle, statement, confOverlay, true); - } + 
/* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#executeStatementAsync(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.util.Map) + */ + @Override + public OperationHandle executeStatementAsync(SessionHandle sessionHandle, String statement, + Map confOverlay) + throws HiveSQLException { + return executeStatementInternal(sessionHandle, statement, confOverlay, true); + } - private OperationHandle executeStatementInternal(SessionHandle sessionHandle, String statement, - Map confOverlay, boolean isAsync) - throws HiveSQLException { - try { - TExecuteStatementReq req = - new TExecuteStatementReq(sessionHandle.toTSessionHandle(), statement); - req.setConfOverlay(confOverlay); - req.setRunAsync(isAsync); - TExecuteStatementResp resp = cliService.ExecuteStatement(req); - checkStatus(resp.getStatus()); - return new OperationHandle(resp.getOperationHandle()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + private OperationHandle executeStatementInternal(SessionHandle sessionHandle, String statement, + Map confOverlay, boolean isAsync) + throws HiveSQLException { + try { + TExecuteStatementReq req = + new TExecuteStatementReq(sessionHandle.toTSessionHandle(), statement); + req.setConfOverlay(confOverlay); + req.setRunAsync(isAsync); + TExecuteStatementResp resp = cliService.ExecuteStatement(req); + checkStatus(resp.getStatus()); + return new OperationHandle(resp.getOperationHandle()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getTypeInfo(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getTypeInfo(SessionHandle sessionHandle) throws HiveSQLException { - try { - TGetTypeInfoReq req = new TGetTypeInfoReq(sessionHandle.toTSessionHandle()); - TGetTypeInfoResp resp = cliService.GetTypeInfo(req); - 
checkStatus(resp.getStatus()); - return new OperationHandle(resp.getOperationHandle()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getTypeInfo(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getTypeInfo(SessionHandle sessionHandle) throws HiveSQLException { + try { + TGetTypeInfoReq req = new TGetTypeInfoReq(sessionHandle.toTSessionHandle()); + TGetTypeInfoResp resp = cliService.GetTypeInfo(req); + checkStatus(resp.getStatus()); + return new OperationHandle(resp.getOperationHandle()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getCatalogs(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getCatalogs(SessionHandle sessionHandle) throws HiveSQLException { - try { - TGetCatalogsReq req = new TGetCatalogsReq(sessionHandle.toTSessionHandle()); - TGetCatalogsResp resp = cliService.GetCatalogs(req); - checkStatus(resp.getStatus()); - return new OperationHandle(resp.getOperationHandle()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getCatalogs(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getCatalogs(SessionHandle sessionHandle) throws HiveSQLException { + try { + TGetCatalogsReq req = new TGetCatalogsReq(sessionHandle.toTSessionHandle()); + TGetCatalogsResp resp = cliService.GetCatalogs(req); + checkStatus(resp.getStatus()); + return new OperationHandle(resp.getOperationHandle()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see 
org.apache.hive.service.cli.ICLIService#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String) - */ - @Override - public OperationHandle getSchemas(SessionHandle sessionHandle, String catalogName, - String schemaName) - throws HiveSQLException { - try { - TGetSchemasReq req = new TGetSchemasReq(sessionHandle.toTSessionHandle()); - req.setCatalogName(catalogName); - req.setSchemaName(schemaName); - TGetSchemasResp resp = cliService.GetSchemas(req); - checkStatus(resp.getStatus()); - return new OperationHandle(resp.getOperationHandle()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getSchemas(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String) + */ + @Override + public OperationHandle getSchemas(SessionHandle sessionHandle, String catalogName, + String schemaName) + throws HiveSQLException { + try { + TGetSchemasReq req = new TGetSchemasReq(sessionHandle.toTSessionHandle()); + req.setCatalogName(catalogName); + req.setSchemaName(schemaName); + TGetSchemasResp resp = cliService.GetSchemas(req); + checkStatus(resp.getStatus()); + return new OperationHandle(resp.getOperationHandle()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List) - */ - @Override - public OperationHandle getTables(SessionHandle sessionHandle, String catalogName, - String schemaName, String tableName, List tableTypes) - throws HiveSQLException { - try { - TGetTablesReq req = new TGetTablesReq(sessionHandle.toTSessionHandle()); - req.setTableName(tableName); - req.setTableTypes(tableTypes); - req.setSchemaName(schemaName); - TGetTablesResp resp = 
cliService.GetTables(req); - checkStatus(resp.getStatus()); - return new OperationHandle(resp.getOperationHandle()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getTables(org.apache.hive.service.cli.SessionHandle, java.lang.String, java.lang.String, java.lang.String, java.util.List) + */ + @Override + public OperationHandle getTables(SessionHandle sessionHandle, String catalogName, + String schemaName, String tableName, List tableTypes) + throws HiveSQLException { + try { + TGetTablesReq req = new TGetTablesReq(sessionHandle.toTSessionHandle()); + req.setTableName(tableName); + req.setTableTypes(tableTypes); + req.setSchemaName(schemaName); + TGetTablesResp resp = cliService.GetTables(req); + checkStatus(resp.getStatus()); + return new OperationHandle(resp.getOperationHandle()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getTableTypes(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getTableTypes(SessionHandle sessionHandle) throws HiveSQLException { - try { - TGetTableTypesReq req = new TGetTableTypesReq(sessionHandle.toTSessionHandle()); - TGetTableTypesResp resp = cliService.GetTableTypes(req); - checkStatus(resp.getStatus()); - return new OperationHandle(resp.getOperationHandle()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getTableTypes(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getTableTypes(SessionHandle sessionHandle) throws HiveSQLException { + try { + TGetTableTypesReq req = new TGetTableTypesReq(sessionHandle.toTSessionHandle()); + TGetTableTypesResp resp = 
cliService.GetTableTypes(req); + checkStatus(resp.getStatus()); + return new OperationHandle(resp.getOperationHandle()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getColumns(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public OperationHandle getColumns(SessionHandle sessionHandle, - String catalogName, String schemaName, String tableName, String columnName) - throws HiveSQLException { - try { - TGetColumnsReq req = new TGetColumnsReq(); - req.setSessionHandle(sessionHandle.toTSessionHandle()); - req.setCatalogName(catalogName); - req.setSchemaName(schemaName); - req.setTableName(tableName); - req.setColumnName(columnName); - TGetColumnsResp resp = cliService.GetColumns(req); - checkStatus(resp.getStatus()); - return new OperationHandle(resp.getOperationHandle()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getColumns(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getColumns(SessionHandle sessionHandle, + String catalogName, String schemaName, String tableName, String columnName) + throws HiveSQLException { + try { + TGetColumnsReq req = new TGetColumnsReq(); + req.setSessionHandle(sessionHandle.toTSessionHandle()); + req.setCatalogName(catalogName); + req.setSchemaName(schemaName); + req.setTableName(tableName); + req.setColumnName(columnName); + TGetColumnsResp resp = cliService.GetColumns(req); + checkStatus(resp.getStatus()); + return new OperationHandle(resp.getOperationHandle()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getFunctions(org.apache.hive.service.cli.SessionHandle) - */ - @Override - public 
OperationHandle getFunctions(SessionHandle sessionHandle, - String catalogName, String schemaName, String functionName) throws HiveSQLException { - try { - TGetFunctionsReq req = new TGetFunctionsReq(sessionHandle.toTSessionHandle(), functionName); - req.setCatalogName(catalogName); - req.setSchemaName(schemaName); - TGetFunctionsResp resp = cliService.GetFunctions(req); - checkStatus(resp.getStatus()); - return new OperationHandle(resp.getOperationHandle()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getFunctions(org.apache.hive.service.cli.SessionHandle) + */ + @Override + public OperationHandle getFunctions(SessionHandle sessionHandle, + String catalogName, String schemaName, String functionName) throws HiveSQLException { + try { + TGetFunctionsReq req = new TGetFunctionsReq(sessionHandle.toTSessionHandle(), functionName); + req.setCatalogName(catalogName); + req.setSchemaName(schemaName); + TGetFunctionsResp resp = cliService.GetFunctions(req); + checkStatus(resp.getStatus()); + return new OperationHandle(resp.getOperationHandle()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getOperationStatus(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public OperationState getOperationStatus(OperationHandle opHandle) throws HiveSQLException { - try { - TGetOperationStatusReq req = new TGetOperationStatusReq(opHandle.toTOperationHandle()); - TGetOperationStatusResp resp = cliService.GetOperationStatus(req); - checkStatus(resp.getStatus()); - return OperationState.getOperationState(resp.getOperationState()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see 
org.apache.hive.service.cli.ICLIService#getOperationStatus(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public OperationStatus getOperationStatus(OperationHandle opHandle) throws HiveSQLException { + try { + TGetOperationStatusReq req = new TGetOperationStatusReq(opHandle.toTOperationHandle()); + TGetOperationStatusResp resp = cliService.GetOperationStatus(req); + // Checks the status of the RPC call, throws an exception in case of error + checkStatus(resp.getStatus()); + OperationState opState = OperationState.getOperationState(resp.getOperationState()); + HiveSQLException opException = null; + if (opState == OperationState.ERROR) { + opException = new HiveSQLException(resp.getErrorMessage(), resp.getSqlState(), resp.getErrorCode()); + } + return new OperationStatus(opState, opException); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#cancelOperation(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public void cancelOperation(OperationHandle opHandle) throws HiveSQLException { - try { - TCancelOperationReq req = new TCancelOperationReq(opHandle.toTOperationHandle()); - TCancelOperationResp resp = cliService.CancelOperation(req); - checkStatus(resp.getStatus()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#cancelOperation(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public void cancelOperation(OperationHandle opHandle) throws HiveSQLException { + try { + TCancelOperationReq req = new TCancelOperationReq(opHandle.toTOperationHandle()); + TCancelOperationResp resp = cliService.CancelOperation(req); + checkStatus(resp.getStatus()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* 
(non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#closeOperation(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public void closeOperation(OperationHandle opHandle) - throws HiveSQLException { - try { - TCloseOperationReq req = new TCloseOperationReq(opHandle.toTOperationHandle()); - TCloseOperationResp resp = cliService.CloseOperation(req); - checkStatus(resp.getStatus()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#closeOperation(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public void closeOperation(OperationHandle opHandle) + throws HiveSQLException { + try { + TCloseOperationReq req = new TCloseOperationReq(opHandle.toTOperationHandle()); + TCloseOperationResp resp = cliService.CloseOperation(req); + checkStatus(resp.getStatus()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public TableSchema getResultSetMetadata(OperationHandle opHandle) - throws HiveSQLException { - try { - TGetResultSetMetadataReq req = new TGetResultSetMetadataReq(opHandle.toTOperationHandle()); - TGetResultSetMetadataResp resp = cliService.GetResultSetMetadata(req); - checkStatus(resp.getStatus()); - return new TableSchema(resp.getSchema()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#getResultSetMetadata(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public TableSchema getResultSetMetadata(OperationHandle opHandle) + throws HiveSQLException { + try { + TGetResultSetMetadataReq req = new TGetResultSetMetadataReq(opHandle.toTOperationHandle()); 
+ TGetResultSetMetadataResp resp = cliService.GetResultSetMetadata(req); + checkStatus(resp.getStatus()); + return new TableSchema(resp.getSchema()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long) - */ - @Override - public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows) - throws HiveSQLException { - try { - TFetchResultsReq req = new TFetchResultsReq(); - req.setOperationHandle(opHandle.toTOperationHandle()); - req.setOrientation(orientation.toTFetchOrientation()); - req.setMaxRows(maxRows); - TFetchResultsResp resp = cliService.FetchResults(req); - checkStatus(resp.getStatus()); - return new RowSet(resp.getResults()); - } catch (HiveSQLException e) { - throw e; - } catch (Exception e) { - throw new HiveSQLException(e); - } - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long) + */ + @Override + public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows) + throws HiveSQLException { + try { + TFetchResultsReq req = new TFetchResultsReq(); + req.setOperationHandle(opHandle.toTOperationHandle()); + req.setOrientation(orientation.toTFetchOrientation()); + req.setMaxRows(maxRows); + TFetchResultsResp resp = cliService.FetchResults(req); + checkStatus(resp.getStatus()); + return new RowSet(resp.getResults()); + } catch (HiveSQLException e) { + throw e; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } - /* (non-Javadoc) - * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle) - */ - @Override - public RowSet fetchResults(OperationHandle opHandle) throws 
HiveSQLException { - // TODO: set the correct default fetch size - return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 10000); - } + /* (non-Javadoc) + * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle) + */ + @Override + public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException { + // TODO: set the correct default fetch size + return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 10000); + } } diff --git a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java index cd9d99a..05b007b 100644 --- a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java +++ b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java @@ -20,6 +20,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.Collections; @@ -27,6 +28,7 @@ import java.util.Map; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -37,258 +39,287 @@ */ public abstract class CLIServiceTest { - protected static CLIServiceClient client; - - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - } - - /** - * @throws java.lang.Exception - */ - @After - public void tearDown() throws Exception { - } - - @Test - public void openSessionTest() throws Exception { - SessionHandle sessionHandle = client - .openSession("tom", "password", Collections.emptyMap()); - assertNotNull(sessionHandle); - client.closeSession(sessionHandle); - - sessionHandle = client.openSession("tom", "password"); - assertNotNull(sessionHandle); - client.closeSession(sessionHandle); - } - - @Test - public void getFunctionsTest() throws Exception { - SessionHandle sessionHandle = 
client.openSession("tom", "password", new HashMap()); - assertNotNull(sessionHandle); - OperationHandle opHandle = client.getFunctions(sessionHandle, null, null, "*"); - TableSchema schema = client.getResultSetMetadata(opHandle); - - ColumnDescriptor columnDesc = schema.getColumnDescriptorAt(0); - assertEquals("FUNCTION_CAT", columnDesc.getName()); - assertEquals(Type.STRING_TYPE, columnDesc.getType()); - - columnDesc = schema.getColumnDescriptorAt(1); - assertEquals("FUNCTION_SCHEM", columnDesc.getName()); - assertEquals(Type.STRING_TYPE, columnDesc.getType()); - - columnDesc = schema.getColumnDescriptorAt(2); - assertEquals("FUNCTION_NAME", columnDesc.getName()); - assertEquals(Type.STRING_TYPE, columnDesc.getType()); - - columnDesc = schema.getColumnDescriptorAt(3); - assertEquals("REMARKS", columnDesc.getName()); - assertEquals(Type.STRING_TYPE, columnDesc.getType()); - - columnDesc = schema.getColumnDescriptorAt(4); - assertEquals("FUNCTION_TYPE", columnDesc.getName()); - assertEquals(Type.INT_TYPE, columnDesc.getType()); - - columnDesc = schema.getColumnDescriptorAt(5); - assertEquals("SPECIFIC_NAME", columnDesc.getName()); - assertEquals(Type.STRING_TYPE, columnDesc.getType()); - - client.closeOperation(opHandle); - client.closeSession(sessionHandle); - } - - @Test - public void getInfoTest() throws Exception { - SessionHandle sessionHandle = client.openSession("tom", "password", new HashMap()); - assertNotNull(sessionHandle); - - GetInfoValue value = client.getInfo(sessionHandle, GetInfoType.CLI_DBMS_NAME); - System.out.println(value.getStringValue()); - - value = client.getInfo(sessionHandle, GetInfoType.CLI_SERVER_NAME); - System.out.println(value.getStringValue()); - - value = client.getInfo(sessionHandle, GetInfoType.CLI_DBMS_VER); - System.out.println(value.getStringValue()); - - client.closeSession(sessionHandle); - } - - @Test - public void testExecuteStatement() throws Exception { - HashMap confOverlay = new HashMap(); - SessionHandle sessionHandle 
= client.openSession("tom", "password", - new HashMap()); - assertNotNull(sessionHandle); - - // Change lock manager, otherwise unit-test doesn't go through - String queryString = "SET hive.lock.manager=" + - "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager"; - client.executeStatement(sessionHandle, queryString, confOverlay); - - // Drop the table if it exists - queryString = "DROP TABLE IF EXISTS TEST_EXEC"; - client.executeStatement(sessionHandle, queryString, confOverlay); - - // Create a test table - queryString = "CREATE TABLE TEST_EXEC(ID STRING)"; - client.executeStatement(sessionHandle, queryString, confOverlay); - - // Blocking execute - queryString = "SELECT ID FROM TEST_EXEC"; - OperationHandle ophandle = client.executeStatement(sessionHandle, queryString, confOverlay); - - // Expect query to be completed now - assertEquals("Query should be finished", - OperationState.FINISHED, client.getOperationStatus(ophandle)); - } - - @Test - public void testExecuteStatementAsync() throws Exception { - HashMap confOverlay = new HashMap(); - SessionHandle sessionHandle = client.openSession("tom", "password", - new HashMap()); - // Timeout for the poll in case of asynchronous execute - long pollTimeout = System.currentTimeMillis() + 100000; - assertNotNull(sessionHandle); - OperationState state = null; - OperationHandle ophandle; - - // Change lock manager, otherwise unit-test doesn't go through - String queryString = "SET hive.lock.manager=" + - "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager"; - client.executeStatement(sessionHandle, queryString, confOverlay); - - // Drop the table if it exists - queryString = "DROP TABLE IF EXISTS TEST_EXEC_ASYNC"; - client.executeStatement(sessionHandle, queryString, confOverlay); - - // Create a test table - queryString = "CREATE TABLE TEST_EXEC_ASYNC(ID STRING)"; - client.executeStatement(sessionHandle, queryString, confOverlay); - - // Test async execution response when query is malformed - String wrongQueryString 
= "SELECT NAME FROM TEST_EXEC"; - ophandle = client.executeStatementAsync(sessionHandle, wrongQueryString, confOverlay); - - int count = 0; - while (true) { - // Break if polling times out - if (System.currentTimeMillis() > pollTimeout) { - System.out.println("Polling timed out"); - break; - } - state = client.getOperationStatus(ophandle); - System.out.println("Polling: " + ophandle + " count=" + (++count) - + " state=" + state); - - if (OperationState.CANCELED == state || state == OperationState.CLOSED - || state == OperationState.FINISHED || state == OperationState.ERROR) { - break; - } - Thread.sleep(1000); - } - assertEquals("Query should return an error state", - OperationState.ERROR, client.getOperationStatus(ophandle)); - - // Test async execution when query is well formed - queryString = "SELECT ID FROM TEST_EXEC_ASYNC"; - ophandle = - client.executeStatementAsync(sessionHandle, queryString, confOverlay); - - count = 0; - while (true) { - // Break if polling times out - if (System.currentTimeMillis() > pollTimeout) { - System.out.println("Polling timed out"); - break; - } - state = client.getOperationStatus(ophandle); - System.out.println("Polling: " + ophandle + " count=" + (++count) - + " state=" + state); - - if (OperationState.CANCELED == state || state == OperationState.CLOSED - || state == OperationState.FINISHED || state == OperationState.ERROR) { - break; - } - Thread.sleep(1000); - } - assertEquals("Query should be finished", - OperationState.FINISHED, client.getOperationStatus(ophandle)); - - // Cancellation test - ophandle = client.executeStatementAsync(sessionHandle, queryString, confOverlay); - System.out.println("cancelling " + ophandle); - client.cancelOperation(ophandle); - state = client.getOperationStatus(ophandle); - System.out.println(ophandle + " after cancelling, state= " + state); - assertEquals("Query should be cancelled", OperationState.CANCELED, state); - } - - /** - * Test per statement configuration overlay. 
- * Create a table using hiveconf: var substitution, with the conf var passed - * via confOverlay.Verify the confOverlay works for the query and does set the - * value in the session configuration - * @throws Exception - */ - @Test - public void testConfOverlay() throws Exception { - SessionHandle sessionHandle = client.openSession("tom", "password", new HashMap()); - assertNotNull(sessionHandle); - String tabName = "TEST_CONF_EXEC"; - String tabNameVar = "tabNameVar"; - - String setLockMgr = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname - + " = false"; - OperationHandle opHandle = client.executeStatement(sessionHandle, setLockMgr, null); - client.closeOperation(opHandle); - - String dropTable = "DROP TABLE IF EXISTS " + tabName; - opHandle = client.executeStatement(sessionHandle, dropTable, null); - client.closeOperation(opHandle); - - // set a pass a property to operation and check if its set the query config - Map confOverlay = new HashMap(); - confOverlay.put(tabNameVar, tabName); - - // execute statement with the conf overlay - String createTab = "CREATE TABLE ${hiveconf:" + tabNameVar + "} (id int)"; - opHandle = client.executeStatement(sessionHandle, createTab, confOverlay); - assertNotNull(opHandle); - // query should pass and create the table - assertEquals("Query should be finished", - OperationState.FINISHED, client.getOperationStatus(opHandle)); - client.closeOperation(opHandle); - - // select from the new table should pass - String selectTab = "SELECT * FROM " + tabName; - opHandle = client.executeStatement(sessionHandle, selectTab, null); - assertNotNull(opHandle); - // query should pass and create the table - assertEquals("Query should be finished", - OperationState.FINISHED, client.getOperationStatus(opHandle)); - client.closeOperation(opHandle); - - // the settings in confoverly should not be part of session config - // another query referring that property with the conf overlay should fail - selectTab = "SELECT * FROM ${hiveconf:" + 
tabNameVar + "}"; - try { - opHandle = client.executeStatement(sessionHandle, selectTab, null); - fail("Query should fail"); - } catch (HiveSQLException e) { - // Expected exception - } - - // cleanup - dropTable = "DROP TABLE IF EXISTS " + tabName; - opHandle = client.executeStatement(sessionHandle, dropTable, null); - client.closeOperation(opHandle); - - - client.closeSession(sessionHandle); - } + protected static CLIServiceClient client; + + /** + * @throws java.lang.Exception + */ + @Before + public void setUp() throws Exception { + } + + /** + * @throws java.lang.Exception + */ + @After + public void tearDown() throws Exception { + } + + @Test + public void openSessionTest() throws Exception { + SessionHandle sessionHandle = client.openSession( + "tom", "password", Collections.emptyMap()); + assertNotNull(sessionHandle); + client.closeSession(sessionHandle); + + sessionHandle = client.openSession("tom", "password"); + assertNotNull(sessionHandle); + client.closeSession(sessionHandle); + } + + @Test + public void getFunctionsTest() throws Exception { + SessionHandle sessionHandle = client.openSession("tom", "password"); + assertNotNull(sessionHandle); + + OperationHandle opHandle = client.getFunctions(sessionHandle, null, null, "*"); + TableSchema schema = client.getResultSetMetadata(opHandle); + + ColumnDescriptor columnDesc = schema.getColumnDescriptorAt(0); + assertEquals("FUNCTION_CAT", columnDesc.getName()); + assertEquals(Type.STRING_TYPE, columnDesc.getType()); + + columnDesc = schema.getColumnDescriptorAt(1); + assertEquals("FUNCTION_SCHEM", columnDesc.getName()); + assertEquals(Type.STRING_TYPE, columnDesc.getType()); + + columnDesc = schema.getColumnDescriptorAt(2); + assertEquals("FUNCTION_NAME", columnDesc.getName()); + assertEquals(Type.STRING_TYPE, columnDesc.getType()); + + columnDesc = schema.getColumnDescriptorAt(3); + assertEquals("REMARKS", columnDesc.getName()); + assertEquals(Type.STRING_TYPE, columnDesc.getType()); + + columnDesc = 
schema.getColumnDescriptorAt(4); + assertEquals("FUNCTION_TYPE", columnDesc.getName()); + assertEquals(Type.INT_TYPE, columnDesc.getType()); + + columnDesc = schema.getColumnDescriptorAt(5); + assertEquals("SPECIFIC_NAME", columnDesc.getName()); + assertEquals(Type.STRING_TYPE, columnDesc.getType()); + + // Cleanup + client.closeOperation(opHandle); + client.closeSession(sessionHandle); + } + + @Test + public void getInfoTest() throws Exception { + SessionHandle sessionHandle = client.openSession( + "tom", "password", Collections.emptyMap()); + assertNotNull(sessionHandle); + + GetInfoValue value = client.getInfo(sessionHandle, GetInfoType.CLI_DBMS_NAME); + System.out.println(value.getStringValue()); + + value = client.getInfo(sessionHandle, GetInfoType.CLI_SERVER_NAME); + System.out.println(value.getStringValue()); + + value = client.getInfo(sessionHandle, GetInfoType.CLI_DBMS_VER); + System.out.println(value.getStringValue()); + + client.closeSession(sessionHandle); + } + + @Test + public void testExecuteStatement() throws Exception { + HashMap confOverlay = new HashMap(); + SessionHandle sessionHandle = client.openSession( + "tom", "password", new HashMap()); + assertNotNull(sessionHandle); + + OperationHandle opHandle; + + String queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + + " = false"; + opHandle = client.executeStatement(sessionHandle, queryString, confOverlay); + client.closeOperation(opHandle); + + queryString = "DROP TABLE IF EXISTS TEST_EXEC"; + opHandle = client.executeStatement(sessionHandle, queryString, confOverlay); + client.closeOperation(opHandle); + + // Create a test table + queryString = "CREATE TABLE TEST_EXEC(ID STRING)"; + opHandle = client.executeStatement(sessionHandle, queryString, confOverlay); + client.closeOperation(opHandle); + + // Blocking execute + queryString = "SELECT ID FROM TEST_EXEC"; + opHandle = client.executeStatement(sessionHandle, queryString, confOverlay); + // Expect query to be completed 
now + assertEquals("Query should be finished", + OperationState.FINISHED, client.getOperationStatus(opHandle).getState()); + client.closeOperation(opHandle); + + // Cleanup + queryString = "DROP TABLE IF EXISTS TEST_EXEC"; + opHandle = client.executeStatement(sessionHandle, queryString, confOverlay); + client.closeOperation(opHandle); + client.closeSession(sessionHandle); + } + + @Test + public void testExecuteStatementAsync() throws Exception { + HashMap confOverlay = new HashMap(); + SessionHandle sessionHandle = client.openSession("tom", "password", + new HashMap()); + assertNotNull(sessionHandle); + + // Timeout for the poll in case of asynchronous execute + long pollTimeout = System.currentTimeMillis() + 10000; + OperationState state = null; + OperationHandle opHandle; + OperationStatus opStatus = null; + + String queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + + " = false"; + opHandle = client.executeStatement(sessionHandle, queryString, confOverlay); + client.closeOperation(opHandle); + + // Drop the table if it exists + queryString = "DROP TABLE IF EXISTS TEST_EXEC_ASYNC"; + opHandle = client.executeStatement(sessionHandle, queryString, confOverlay); + client.closeOperation(opHandle); + + // Create a test table + queryString = "CREATE TABLE TEST_EXEC_ASYNC(ID STRING)"; + opHandle = client.executeStatement(sessionHandle, queryString, confOverlay); + client.closeOperation(opHandle); + + // Test async execution response when query is malformed + // This query will throw an error with errorCode: 10004 + // Refer org.apache.hadoop.hive.ql.ErrorMsg for details + String wrongQueryString = "SELECT NON_EXISTANT_COLUMN FROM TEST_EXEC_ASYNC"; + opHandle = client.executeStatementAsync(sessionHandle, wrongQueryString, confOverlay); + int count = 0; + while (true) { + // Break if polling times out + if (System.currentTimeMillis() > pollTimeout) { + System.out.println("Polling timed out"); + break; + } + opStatus = 
client.getOperationStatus(opHandle); + state = opStatus.getState(); + System.out.println("Polling: " + opHandle + " count=" + (++count) + + " state=" + state); + + if (state == OperationState.CANCELED || state == OperationState.CLOSED + || state == OperationState.FINISHED || state == OperationState.ERROR) { + break; + } + Thread.sleep(1000); + } + assertEquals("Operation should be in error state", OperationState.ERROR, state); + // sqlState, errorCode and errorMsg should be set to appropriate values + // Refer org.apache.hadoop.hive.ql.ErrorMsg for details + assertEquals(opStatus.getOperationException().getSQLState(), "42000"); + assertEquals(opStatus.getOperationException().getErrorCode(), 10004); + // The expected error message should be a substring of the returned error message + String errorMsg = opStatus.getOperationException().getMessage().toLowerCase(); + String expectedErrorMsg = ErrorMsg.getErrorMsg(10004).getMsg().toLowerCase(); + assertTrue("Incorrect error message", errorMsg.contains(expectedErrorMsg)); + client.closeOperation(opHandle); + + // Test async execution when query is well formed + queryString = "SELECT ID FROM TEST_EXEC_ASYNC"; + opHandle = client.executeStatementAsync(sessionHandle, queryString, confOverlay); + count = 0; + while (true) { + // Break if polling times out + if (System.currentTimeMillis() > pollTimeout) { + System.out.println("Polling timed out"); + break; + } + opStatus = client.getOperationStatus(opHandle); + state = opStatus.getState(); + System.out.println("Polling: " + opHandle + " count=" + (++count) + + " state=" + state); + + if (state == OperationState.CANCELED || state == OperationState.CLOSED + || state == OperationState.FINISHED || state == OperationState.ERROR) { + break; + } + Thread.sleep(1000); + } + assertEquals("Query should be finished", OperationState.FINISHED, state); + client.closeOperation(opHandle); + + // Cancellation test + opHandle = client.executeStatementAsync(sessionHandle, queryString, 
confOverlay); + System.out.println("cancelling " + opHandle); + client.cancelOperation(opHandle); + state = client.getOperationStatus(opHandle).getState(); + System.out.println(opHandle + " after cancelling, state= " + state); + assertEquals("Query should be cancelled", OperationState.CANCELED, state); + + // Cleanup + queryString = "DROP TABLE IF EXISTS TEST_EXEC_ASYNC"; + opHandle = client.executeStatement(sessionHandle, queryString, confOverlay); + client.closeOperation(opHandle); + client.closeSession(sessionHandle); + } + + /** + * Test per statement configuration overlay. + * Create a table using hiveconf: var substitution, with the conf var passed + * via confOverlay.Verify the confOverlay works for the query and does set the + * value in the session configuration + * @throws Exception + */ + @Test + public void testConfOverlay() throws Exception { + SessionHandle sessionHandle = client.openSession("tom", "password", new HashMap()); + assertNotNull(sessionHandle); + String tabName = "TEST_CONF_EXEC"; + String tabNameVar = "tabNameVar"; + + String setLockMgr = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + + " = false"; + OperationHandle opHandle = client.executeStatement(sessionHandle, setLockMgr, null); + client.closeOperation(opHandle); + + String dropTable = "DROP TABLE IF EXISTS " + tabName; + opHandle = client.executeStatement(sessionHandle, dropTable, null); + client.closeOperation(opHandle); + + // set a pass a property to operation and check if its set the query config + Map confOverlay = new HashMap(); + confOverlay.put(tabNameVar, tabName); + + // execute statement with the conf overlay + String createTab = "CREATE TABLE ${hiveconf:" + tabNameVar + "} (id int)"; + opHandle = client.executeStatement(sessionHandle, createTab, confOverlay); + assertNotNull(opHandle); + // query should pass and create the table + assertEquals("Query should be finished", + OperationState.FINISHED, client.getOperationStatus(opHandle).getState()); + 
client.closeOperation(opHandle); + + // select from the new table should pass + String selectTab = "SELECT * FROM " + tabName; + opHandle = client.executeStatement(sessionHandle, selectTab, null); + assertNotNull(opHandle); + // query should pass and create the table + assertEquals("Query should be finished", + OperationState.FINISHED, client.getOperationStatus(opHandle).getState()); + client.closeOperation(opHandle); + + // the settings in conf overlay should not be part of session config + // another query referring that property with the conf overlay should fail + selectTab = "SELECT * FROM ${hiveconf:" + tabNameVar + "}"; + try { + opHandle = client.executeStatement(sessionHandle, selectTab, null); + fail("Query should fail"); + } catch (HiveSQLException e) { + // Expected exception + } + + // cleanup + dropTable = "DROP TABLE IF EXISTS " + tabName; + opHandle = client.executeStatement(sessionHandle, dropTable, null); + client.closeOperation(opHandle); + client.closeSession(sessionHandle); + } } diff --git a/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java b/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java index ff7166d..9ec511c 100644 --- a/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java +++ b/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java @@ -31,12 +31,12 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hive.service.Service; import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.PlainSaslHelper; import org.apache.hive.service.cli.CLIService; import org.apache.hive.service.cli.HiveSQLException; -import org.apache.hive.service.cli.OperationState; import org.apache.hive.service.cli.SessionHandle; import 
org.apache.hive.service.cli.session.HiveSession; import org.apache.hive.service.cli.session.SessionManager; @@ -59,228 +59,340 @@ */ public abstract class ThriftCLIServiceTest { - protected static int port; - protected static String host = "localhost"; - protected static HiveServer2 hiveServer2; - protected static TCLIService.Client client; - protected static HiveConf hiveConf; - protected static String anonymousUser = "anonymous"; - protected static String anonymousPasswd = "anonymous"; - - - - /** - * @throws java.lang.Exception - */ - @BeforeClass - public static void setUpBeforeClass() throws Exception { - // Find a free port - port = MetaStoreUtils.findFreePort(); - hiveServer2 = new HiveServer2(); - hiveConf = new HiveConf(); - } - - /** - * @throws java.lang.Exception - */ - @AfterClass - public static void tearDownAfterClass() throws Exception { - stopHiveServer2(); - } - - protected static void startHiveServer2WithConf(HiveConf hiveConf) throws Exception { - hiveServer2.init(hiveConf); - // Start HiveServer2 with given config - // Fail if server doesn't start - try { - hiveServer2.start(); - } catch (Throwable t) { - t.printStackTrace(); - fail(); - } - // Wait for startup to complete - Thread.sleep(2000); - System.out.println("HiveServer2 started on port " + port); - } - - protected static void stopHiveServer2() throws Exception { - if (hiveServer2 != null) { - hiveServer2.stop(); - } - } - - protected static TTransport createBinaryTransport() throws Exception { - return PlainSaslHelper.getPlainTransport(anonymousUser, anonymousPasswd, - new TSocket(host, port)); - } - - protected static void initClient(TTransport transport) { - // Create the corresponding client - TProtocol protocol = new TBinaryProtocol(transport); - client = new TCLIService.Client(protocol); - } - - @Test - public void testOpenSession() throws Exception { - // Create a new request object - TOpenSessionReq openReq = new TOpenSessionReq(); - - // Get the response; ignore exception if any 
- TOpenSessionResp openResp = client.OpenSession(openReq); - assertNotNull("Response should not be null", openResp); - - TSessionHandle sessHandle = openResp.getSessionHandle(); - assertNotNull("Session handle should not be null", sessHandle); - - assertEquals(openResp.getStatus().getStatusCode(), TStatusCode.SUCCESS_STATUS); - - // Close the session; ignore exception if any - TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle); - client.CloseSession(closeReq); - } - - @Test - public void testGetFunctions() throws Exception { - // Create a new open session request object - TOpenSessionReq openReq = new TOpenSessionReq(); - TSessionHandle sessHandle = client.OpenSession(openReq).getSessionHandle(); - assertNotNull(sessHandle); - - TGetFunctionsReq funcReq = new TGetFunctionsReq(); - funcReq.setSessionHandle(sessHandle); - funcReq.setFunctionName("*"); - funcReq.setCatalogName(null); - funcReq.setSchemaName(null); - - TGetFunctionsResp funcResp = client.GetFunctions(funcReq); - assertNotNull(funcResp); - assertNotNull(funcResp.getStatus()); - assertFalse(funcResp.getStatus().getStatusCode() == TStatusCode.ERROR_STATUS); - - // Close the session; ignore exception if any - TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle); - client.CloseSession(closeReq); - } - - @Test - public void testExecuteStatement() throws Exception { - // Create a new request object - TOpenSessionReq openReq = new TOpenSessionReq(); - TSessionHandle sessHandle = client.OpenSession(openReq).getSessionHandle(); - assertNotNull(sessHandle); - - // Change lock manager to embedded mode - String queryString = "SET hive.lock.manager=" + - "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager"; - executeQuerySync(queryString, sessHandle); - - // Drop the table if it exists - queryString = "DROP TABLE IF EXISTS TEST_EXEC_THRIFT"; - executeQuerySync(queryString, sessHandle); - - // Create a test table - queryString = "CREATE TABLE TEST_EXEC_THRIFT(ID STRING)"; - 
executeQuerySync(queryString, sessHandle); - - // Execute another query to test - queryString = "SELECT ID FROM TEST_EXEC_THRIFT"; - TExecuteStatementResp execResp = executeQuerySync(queryString, sessHandle); - TOperationHandle operationHandle = execResp.getOperationHandle(); - assertNotNull(operationHandle); - - TGetOperationStatusReq opStatusReq = new TGetOperationStatusReq(); - opStatusReq.setOperationHandle(operationHandle); - assertNotNull(opStatusReq); - TGetOperationStatusResp opStatusResp = client.GetOperationStatus(opStatusReq); - - // Expect query to be completed now - assertEquals("Query should be finished", - OperationState.FINISHED, OperationState.getOperationState(opStatusResp.getOperationState())); - - queryString = "DROP TABLE TEST_EXEC_THRIFT"; - executeQuerySync(queryString, sessHandle); - - // Close the session; ignore exception if any - TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle); - client.CloseSession(closeReq); - } - - private TExecuteStatementResp executeQuerySync(String queryString, TSessionHandle sessHandle) - throws Exception { - TExecuteStatementReq execReq = new TExecuteStatementReq(); - execReq.setSessionHandle(sessHandle); - execReq.setStatement(queryString); - execReq.setRunAsync(false); - TExecuteStatementResp execResp = client.ExecuteStatement(execReq); - assertNotNull(execResp); - assertFalse(execResp.getStatus().getStatusCode() == TStatusCode.ERROR_STATUS); - return execResp; - } - - protected void testOpenSessionExpectedException() { - boolean caughtEx = false; - // Create a new open session request object - TOpenSessionReq openReq = new TOpenSessionReq(); - try { - client.OpenSession(openReq).getSessionHandle(); - } catch (Exception e) { - caughtEx = true; - System.out.println("Exception expected: " + e.toString()); - } - assertTrue("Exception expected", caughtEx); - } - - /** - * Test setting {@link HiveConf.ConfVars}} config parameter - * HIVE_SERVER2_ENABLE_DOAS for kerberos secure mode - * @throws 
IOException - * @throws LoginException - * @throws HiveSQLException - */ - @Test - public void testDoAs() throws HiveSQLException, LoginException, IOException { - HiveConf hconf = new HiveConf(); - assertTrue("default value of hive server2 doAs should be true", - hconf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)); - - hconf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, - HiveAuthFactory.AuthTypes.KERBEROS.toString()); - - CLIService cliService = new CLIService(); - cliService.init(hconf); - ThriftCLIService tcliService = new ThriftBinaryCLIService(cliService); - TOpenSessionReq req = new TOpenSessionReq(); - req.setUsername("testuser1"); - SessionHandle sHandle = tcliService.getSessionHandle(req ); - SessionManager sManager = getSessionManager(cliService.getServices()); - HiveSession session = sManager.getSession(sHandle); - - //Proxy class for doing doAs on all calls is used when doAs is enabled - // and kerberos security is on - assertTrue("check if session class is a proxy", session instanceof java.lang.reflect.Proxy); - } - - private SessionManager getSessionManager(Collection services) { - for(Service s : services){ - if(s instanceof SessionManager){ - return (SessionManager)s; - } - } - return null; - } - - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - } - - /** - * @throws java.lang.Exception - */ - @After - public void tearDown() throws Exception { - - } + protected static int port; + protected static String host = "localhost"; + protected static HiveServer2 hiveServer2; + protected static TCLIService.Client client; + protected static HiveConf hiveConf; + protected static String anonymousUser = "anonymous"; + protected static String anonymousPasswd = "anonymous"; + + /** + * @throws java.lang.Exception + */ + @BeforeClass + public static void setUpBeforeClass() throws Exception { + // Find a free port + port = MetaStoreUtils.findFreePort(); + hiveServer2 = new HiveServer2(); + hiveConf = new HiveConf(); + } + 
+ /** + * @throws java.lang.Exception + */ + @AfterClass + public static void tearDownAfterClass() throws Exception { + stopHiveServer2(); + } + + protected static void startHiveServer2WithConf(HiveConf hiveConf) throws Exception { + hiveServer2.init(hiveConf); + // Start HiveServer2 with given config + // Fail if server doesn't start + try { + hiveServer2.start(); + } catch (Throwable t) { + t.printStackTrace(); + fail(); + } + // Wait for startup to complete + Thread.sleep(2000); + System.out.println("HiveServer2 started on port " + port); + } + + protected static void stopHiveServer2() throws Exception { + if (hiveServer2 != null) { + hiveServer2.stop(); + } + } + + protected static TTransport createBinaryTransport() throws Exception { + return PlainSaslHelper.getPlainTransport(anonymousUser, anonymousPasswd, + new TSocket(host, port)); + } + + protected static void initClient(TTransport transport) { + // Create the corresponding client + TProtocol protocol = new TBinaryProtocol(transport); + client = new TCLIService.Client(protocol); + } + + @Test + public void testOpenSession() throws Exception { + // Create a new request object + TOpenSessionReq openReq = new TOpenSessionReq(); + + // Get the response; ignore exception if any + TOpenSessionResp openResp = client.OpenSession(openReq); + assertNotNull("Response should not be null", openResp); + + TSessionHandle sessHandle = openResp.getSessionHandle(); + assertNotNull("Session handle should not be null", sessHandle); + + assertEquals(openResp.getStatus().getStatusCode(), TStatusCode.SUCCESS_STATUS); + + // Close the session; ignore exception if any + TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle); + client.CloseSession(closeReq); + } + + @Test + public void testGetFunctions() throws Exception { + // Create a new open session request object + TOpenSessionReq openReq = new TOpenSessionReq(); + TSessionHandle sessHandle = client.OpenSession(openReq).getSessionHandle(); + assertNotNull(sessHandle); + 
+ TGetFunctionsReq funcReq = new TGetFunctionsReq(); + funcReq.setSessionHandle(sessHandle); + funcReq.setFunctionName("*"); + funcReq.setCatalogName(null); + funcReq.setSchemaName(null); + + TGetFunctionsResp funcResp = client.GetFunctions(funcReq); + assertNotNull(funcResp); + assertNotNull(funcResp.getStatus()); + assertFalse(funcResp.getStatus().getStatusCode() == TStatusCode.ERROR_STATUS); + + // Close the session; ignore exception if any + TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle); + client.CloseSession(closeReq); + } + + /** + * Test synchronous query execution + * @throws Exception + */ + @Test + public void testExecuteStatement() throws Exception { + // Create a new request object + TOpenSessionReq openReq = new TOpenSessionReq(); + TSessionHandle sessHandle = client.OpenSession(openReq).getSessionHandle(); + assertNotNull(sessHandle); + + // Change lock manager to embedded mode + String queryString = "SET hive.lock.manager=" + + "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager"; + executeQuery(queryString, sessHandle, false); + + // Drop the table if it exists + queryString = "DROP TABLE IF EXISTS TEST_EXEC_THRIFT"; + executeQuery(queryString, sessHandle, false); + + // Create a test table + queryString = "CREATE TABLE TEST_EXEC_THRIFT(ID STRING)"; + executeQuery(queryString, sessHandle, false); + + // Execute another query + queryString = "SELECT ID FROM TEST_EXEC_THRIFT"; + TExecuteStatementResp execResp = executeQuery(queryString, sessHandle, false); + TOperationHandle operationHandle = execResp.getOperationHandle(); + assertNotNull(operationHandle); + + TGetOperationStatusReq opStatusReq = new TGetOperationStatusReq(); + opStatusReq.setOperationHandle(operationHandle); + assertNotNull(opStatusReq); + TGetOperationStatusResp opStatusResp = client.GetOperationStatus(opStatusReq); + TOperationState state = opStatusResp.getOperationState(); + // Expect query to be completed now + assertEquals("Query should be finished", 
TOperationState.FINISHED_STATE, state); + + // Cleanup + queryString = "DROP TABLE TEST_EXEC_THRIFT"; + executeQuery(queryString, sessHandle, false); + + // Close the session; ignore exception if any + TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle); + client.CloseSession(closeReq); + } + + /** + * Test asynchronous query execution and error message reporting to the client + * @throws Exception + */ + @Test + public void testExecuteStatementAsync() throws Exception { + // Create a new request object + TOpenSessionReq openReq = new TOpenSessionReq(); + TSessionHandle sessHandle = client.OpenSession(openReq).getSessionHandle(); + assertNotNull(sessHandle); + + // Change lock manager to embedded mode + String queryString = "SET hive.lock.manager=" + + "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager"; + executeQuery(queryString, sessHandle, false); + + // Drop the table if it exists + queryString = "DROP TABLE IF EXISTS TEST_EXEC_ASYNC_THRIFT"; + executeQuery(queryString, sessHandle, false); + + // Create a test table + queryString = "CREATE TABLE TEST_EXEC_ASYNC_THRIFT(ID STRING)"; + executeQuery(queryString, sessHandle, false); + + // Execute another query + queryString = "SELECT ID FROM TEST_EXEC_ASYNC_THRIFT"; + System.out.println("Will attempt to execute: " + queryString); + TExecuteStatementResp execResp = executeQuery(queryString, sessHandle, true); + TOperationHandle operationHandle = execResp.getOperationHandle(); + assertNotNull(operationHandle); + + // Poll on the operation status till the query is completed + boolean isQueryRunning = true; + TGetOperationStatusReq opStatusReq; + TGetOperationStatusResp opStatusResp = null; + TOperationState state = null; + long pollTimeout = System.currentTimeMillis() + 100000; + + while(isQueryRunning) { + // Break if polling times out + if (System.currentTimeMillis() > pollTimeout) { + System.out.println("Polling timed out"); + break; + } + opStatusReq = new TGetOperationStatusReq(); + 
opStatusReq.setOperationHandle(operationHandle); + assertNotNull(opStatusReq); + opStatusResp = client.GetOperationStatus(opStatusReq); + state = opStatusResp.getOperationState(); + System.out.println("Current state: " + state); + + if (state == TOperationState.CANCELED_STATE || state == TOperationState.CLOSED_STATE + || state == TOperationState.FINISHED_STATE || state == TOperationState.ERROR_STATE) { + isQueryRunning = false; + } + Thread.sleep(1000); + } + + // Expect query to be successfully completed now + assertEquals("Query should be finished", + TOperationState.FINISHED_STATE, state); + + // Execute a malformed query + queryString = "SELECT NON_EXISTING_COLUMN FROM TEST_EXEC_ASYNC_THRIFT"; + System.out.println("Will attempt to execute: " + queryString); + execResp = executeQuery(queryString, sessHandle, true); + operationHandle = execResp.getOperationHandle(); + assertNotNull(operationHandle); + isQueryRunning = true; + while(isQueryRunning) { + // Break if polling times out + if (System.currentTimeMillis() > pollTimeout) { + System.out.println("Polling timed out"); + break; + } + opStatusReq = new TGetOperationStatusReq(); + opStatusReq.setOperationHandle(operationHandle); + assertNotNull(opStatusReq); + opStatusResp = client.GetOperationStatus(opStatusReq); + state = opStatusResp.getOperationState(); + System.out.println("Current state: " + state); + + if (state == TOperationState.CANCELED_STATE || state == TOperationState.CLOSED_STATE + || state == TOperationState.FINISHED_STATE || state == TOperationState.ERROR_STATE) { + isQueryRunning = false; + } + Thread.sleep(1000); + } + + // Expect query to return an error state + assertEquals("Operation should be in error state", TOperationState.ERROR_STATE, state); + + // sqlState, errorCode and errorMsg should be set to appropriate values + // Refer org.apache.hadoop.hive.ql.ErrorMsg for details + assertEquals(opStatusResp.getSqlState(), "42000"); + assertEquals(opStatusResp.getErrorCode(), 10004); + // The 
expected error message should be a substring of the returned error message + String errorMsg = opStatusResp.getErrorMessage().toLowerCase(); + String expectedErrorMsg = ErrorMsg.getErrorMsg(10004).getMsg().toLowerCase(); + assertTrue("Incorrect error message", errorMsg.contains(expectedErrorMsg)); + + // Cleanup + queryString = "DROP TABLE TEST_EXEC_ASYNC_THRIFT"; + executeQuery(queryString, sessHandle, false); + + // Close the session; ignore exception if any + TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle); + client.CloseSession(closeReq); + } + + private TExecuteStatementResp executeQuery(String queryString, TSessionHandle sessHandle, boolean runAsync) + throws Exception { + TExecuteStatementReq execReq = new TExecuteStatementReq(); + execReq.setSessionHandle(sessHandle); + execReq.setStatement(queryString); + execReq.setRunAsync(runAsync); + TExecuteStatementResp execResp = client.ExecuteStatement(execReq); + assertNotNull(execResp); + assertFalse(execResp.getStatus().getStatusCode() == TStatusCode.ERROR_STATUS); + return execResp; + } + + protected void testOpenSessionExpectedException() { + boolean caughtEx = false; + // Create a new open session request object + TOpenSessionReq openReq = new TOpenSessionReq(); + try { + client.OpenSession(openReq).getSessionHandle(); + } catch (Exception e) { + caughtEx = true; + System.out.println("Exception expected: " + e.toString()); + } + assertTrue("Exception expected", caughtEx); + } + + /** + * Test setting {@link HiveConf.ConfVars}} config parameter + * HIVE_SERVER2_ENABLE_DOAS for kerberos secure mode + * @throws IOException + * @throws LoginException + * @throws HiveSQLException + */ + @Test + public void testDoAs() throws HiveSQLException, LoginException, IOException { + HiveConf hconf = new HiveConf(); + assertTrue("default value of hive server2 doAs should be true", + hconf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)); + + hconf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, + 
HiveAuthFactory.AuthTypes.KERBEROS.toString()); + + CLIService cliService = new CLIService(); + cliService.init(hconf); + ThriftCLIService tcliService = new ThriftBinaryCLIService(cliService); + TOpenSessionReq req = new TOpenSessionReq(); + req.setUsername("testuser1"); + SessionHandle sHandle = tcliService.getSessionHandle(req ); + SessionManager sManager = getSessionManager(cliService.getServices()); + HiveSession session = sManager.getSession(sHandle); + + //Proxy class for doing doAs on all calls is used when doAs is enabled + // and kerberos security is on + assertTrue("check if session class is a proxy", session instanceof java.lang.reflect.Proxy); + } + + private SessionManager getSessionManager(Collection services) { + for(Service s : services){ + if(s instanceof SessionManager){ + return (SessionManager)s; + } + } + return null; + } + + /** + * @throws java.lang.Exception + */ + @Before + public void setUp() throws Exception { + } + + /** + * @throws java.lang.Exception + */ + @After + public void tearDown() throws Exception { + + } }