diff --git service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
index 7944467..d7fc1e8 100644
--- service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
+++ service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
@@ -30,12 +30,17 @@
 import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * GetCatalogsOperation.
  *
  */
 public class GetCatalogsOperation extends MetadataOperation {
+
+  private static final Logger LOG = LoggerFactory.getLogger(GetCatalogsOperation.class.getName());
+
   private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
       .addStringColumn("TABLE_CAT", "Catalog name. NULL if not applicable.");
 
@@ -44,6 +49,7 @@ protected GetCatalogsOperation(HiveSession parentSession) {
     super(parentSession, OperationType.GET_CATALOGS);
     rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+    LOG.info("Starting GetCatalogsOperation");
   }
 
   @Override
@@ -54,11 +60,11 @@ public void runInternal() throws HiveSQLException {
         authorizeMetaGets(HiveOperationType.GET_CATALOGS, null);
       }
       setState(OperationState.FINISHED);
+      LOG.info("Fetching catalog metadata has been successfully finished");
     } catch (HiveSQLException e) {
       setState(OperationState.ERROR);
       throw e;
     }
-
   }
 
   /* (non-Javadoc)
diff --git service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
index d67ea90..838dd89 100644
--- service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
+++ service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
@@ -50,6 +50,8 @@
 import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * GetColumnsOperation.
@@ -57,6 +59,8 @@
  */
 public class GetColumnsOperation extends MetadataOperation {
 
+  private static final Logger LOG = LoggerFactory.getLogger(GetColumnsOperation.class.getName());
+
   private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
       .addPrimitiveColumn("TABLE_CAT", Type.STRING_TYPE,
           "Catalog name. NULL if not applicable")
@@ -127,11 +131,15 @@ protected GetColumnsOperation(HiveSession parentSession, String catalogName, Str
     this.tableName = tableName;
     this.columnName = columnName;
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+    LOG.info("Starting GetColumnsOperation with the following parameters: "
+        + "catalogName={}, schemaName={}, tableName={}, columnName={}",
+        catalogName, schemaName, tableName, columnName);
   }
 
   @Override
   public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
+    LOG.info("Fetching column metadata");
     try {
       IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
       String schemaPattern = convertSchemaPattern(schemaName);
@@ -204,18 +212,25 @@ public void runInternal() throws HiveSQLException {
             "NO", // IS_AUTO_INCREMENT
           };
           rowSet.addRow(rowData);
+
+          if (LOG.isDebugEnabled()) {
+            String debugMessage = getDebugMessage("column", RESULT_SET_SCHEMA);
+            LOG.debug(debugMessage, rowData);
+          }
         }
       }
+      if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+        LOG.debug("No column metadata has been returned.");
+      }
       setState(OperationState.FINISHED);
+      LOG.info("Fetching column metadata has been successfully finished");
     } catch (Exception e) {
       setState(OperationState.ERROR);
       throw new HiveSQLException(e);
     }
-
   }
-
   private List<HivePrivilegeObject> getPrivObjs(Map<String, List<String>> db2Tabs) {
     List<HivePrivilegeObject> privObjs = new ArrayList<>();
     for (Entry<String, List<String>> dbTabs : db2Tabs.entrySet()) {
diff --git service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
index 99ccd4e..e39502f 100644
--- service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
+++ service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
@@ -117,11 +117,16 @@ public GetCrossReferenceOperation(HiveSession parentSession,
     this.foreignSchemaName = foreignSchema;
     this.foreignTableName = foreignTable;
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+    LOG.info("Starting GetCrossReferenceOperation with the following parameters:"
+        + " parentCatalogName={}, parentSchemaName={}, parentTableName={}, foreignCatalog={}, "
+        + "foreignSchema={}, foreignTable={}", parentCatalogName, parentSchemaName,
+        parentTableName, foreignCatalog, foreignSchema, foreignTable);
   }
 
   @Override
   public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
+    LOG.info("Fetching cross reference metadata");
     try {
       IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
       ForeignKeysRequest fkReq = new ForeignKeysRequest(parentSchemaName, parentTableName, foreignSchemaName, foreignTableName);
@@ -130,21 +135,29 @@ public void runInternal() throws HiveSQLException {
         return;
       }
       for (SQLForeignKey fk : fks) {
-        rowSet.addRow(new Object[] {parentCatalogName,
-            fk.getPktable_db(), fk.getPktable_name(), fk.getPkcolumn_name(),
-            foreignCatalogName,
-            fk.getFktable_db(), fk.getFktable_name(), fk.getFkcolumn_name(),
-            fk.getKey_seq(), fk.getUpdate_rule(), fk.getDelete_rule(), fk.getFk_name(),
-            fk.getPk_name(), 0});
+        Object[] rowData = new Object[] {parentCatalogName,
+            fk.getPktable_db(), fk.getPktable_name(), fk.getPkcolumn_name(),
+            foreignCatalogName,
+            fk.getFktable_db(), fk.getFktable_name(), fk.getFkcolumn_name(),
+            fk.getKey_seq(), fk.getUpdate_rule(), fk.getDelete_rule(), fk.getFk_name(),
+            fk.getPk_name(), 0};
+        rowSet.addRow(rowData);
+        if (LOG.isDebugEnabled()) {
+          String debugMessage = getDebugMessage("cross reference", RESULT_SET_SCHEMA);
+          LOG.debug(debugMessage, rowData);
+        }
+      }
+      if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+        LOG.debug("No cross reference metadata has been returned.");
       }
       setState(OperationState.FINISHED);
+      LOG.info("Fetching cross reference metadata has been successfully finished");
     } catch (Exception e) {
       setState(OperationState.ERROR);
       throw new HiveSQLException(e);
     }
   }
-
   /* (non-Javadoc)
    * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
    */
diff --git service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
index 091bf50..5d5d099 100644
--- service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
+++ service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
@@ -41,12 +41,17 @@
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
 import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * GetFunctionsOperation.
  *
  */
 public class GetFunctionsOperation extends MetadataOperation {
+
+  private static final Logger LOG = LoggerFactory.getLogger(GetFunctionsOperation.class.getName());
+
   private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
       .addPrimitiveColumn("FUNCTION_CAT", Type.STRING_TYPE,
           "Function catalog (may be null)")
@@ -74,11 +79,15 @@ public GetFunctionsOperation(HiveSession parentSession, String catalogName, Stri
     this.schemaName = schemaName;
     this.functionName = functionName;
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+    LOG.info(
+        "Starting GetFunctionsOperation with the following parameters: catalogName={}, schemaName={}, functionName={}",
+        catalogName, schemaName, functionName);
   }
 
   @Override
   public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
+    LOG.info("Fetching function metadata");
     if (isAuthV2Enabled()) {
       // get databases for schema pattern
       IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
@@ -115,16 +124,24 @@ public void runInternal() throws HiveSQLException {
             functionInfo.getClass().getCanonicalName()
           };
           rowSet.addRow(rowData);
+
+          if (LOG.isDebugEnabled()) {
+            String debugMessage = getDebugMessage("function", RESULT_SET_SCHEMA);
+            LOG.debug(debugMessage, rowData);
+          }
         }
       }
+      if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+        LOG.debug("No function metadata has been returned");
+      }
       setState(OperationState.FINISHED);
+      LOG.info("Fetching function metadata has been successfully finished");
     } catch (Exception e) {
       setState(OperationState.ERROR);
       throw new HiveSQLException(e);
     }
   }
-
   /* (non-Javadoc)
    * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
    */
diff --git service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java
index e603fdd..55f4ab6 100644
--- service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java
+++ service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java
@@ -22,12 +22,10 @@
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.lang.NumberUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.serde2.thrift.Type;
 import org.apache.hive.service.cli.FetchOrientation;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.OperationState;
@@ -35,14 +33,18 @@
 import org.apache.hive.service.cli.RowSet;
 import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.TableSchema;
-import org.apache.hadoop.hive.serde2.thrift.Type;
 import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * GetPrimaryKeysOperation.
  *
  */
 public class GetPrimaryKeysOperation extends MetadataOperation {
+
+  private static final Logger LOG = LoggerFactory.getLogger(GetPrimaryKeysOperation.class.getName());
+
   /**
  TABLE_CAT String => table catalog (may be null)
  TABLE_SCHEM String => table schema (may be null)
@@ -78,11 +80,15 @@ public GetPrimaryKeysOperation(HiveSession parentSession,
     this.schemaName = schemaName;
     this.tableName = tableName;
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+    LOG.info(
+        "Starting GetPrimaryKeysOperation with the following parameters: catalogName={}, schemaName={}, tableName={}",
+        catalogName, schemaName, tableName);
   }
 
   @Override
   public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
+    LOG.info("Fetching primary key metadata");
     try {
       IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
       PrimaryKeysRequest sqlReq = new PrimaryKeysRequest(schemaName, tableName);
@@ -91,17 +97,31 @@ public void runInternal() throws HiveSQLException {
         return;
       }
       for (SQLPrimaryKey pk : pks) {
-        rowSet.addRow(new Object[] {catalogName, pk.getTable_db(),
-            pk.getTable_name(), pk.getColumn_name(), pk.getKey_seq(), pk.getPk_name()});
-      }
-      setState(OperationState.FINISHED);
-    } catch (Exception e) {
-      setState(OperationState.ERROR);
-      throw new HiveSQLException(e);
-    }
+        Object[] rowData = new Object[] {
+            catalogName,
+            pk.getTable_db(),
+            pk.getTable_name(),
+            pk.getColumn_name(),
+            pk.getKey_seq(),
+            pk.getPk_name()
+        };
+        rowSet.addRow(rowData);
+        if (LOG.isDebugEnabled()) {
+          String debugMessage = getDebugMessage("primary key", RESULT_SET_SCHEMA);
+          LOG.debug(debugMessage, rowData);
+        }
+      }
+      if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+        LOG.debug("No primary key metadata has been returned.");
+      }
+      setState(OperationState.FINISHED);
+      LOG.info("Fetching primary key metadata has been successfully finished");
+    } catch (Exception e) {
+      setState(OperationState.ERROR);
+      throw new HiveSQLException(e);
+    }
   }
-
   /* (non-Javadoc)
    * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
    */
diff --git service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
index de09ec9..b978787 100644
--- service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
+++ service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
@@ -31,12 +31,17 @@
 import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * GetSchemasOperation.
  *
  */
 public class GetSchemasOperation extends MetadataOperation {
+
+  private static final Logger LOG = LoggerFactory.getLogger(GetSchemasOperation.class.getName());
+
   private final String catalogName;
   private final String schemaName;
 
@@ -51,11 +56,15 @@ protected GetSchemasOperation(HiveSession parentSession, String catalogName, Str
     this.catalogName = catalogName;
     this.schemaName = schemaName;
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+    LOG.info(
+        "Starting GetSchemasOperation with the following parameters: catalogName={}, schemaName={}",
+        catalogName, schemaName);
   }
 
   @Override
   public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
+    LOG.info("Fetching schema metadata");
     if (isAuthV2Enabled()) {
       String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName;
       authorizeMetaGets(HiveOperationType.GET_SCHEMAS, null, cmdStr);
@@ -65,15 +74,22 @@ public void runInternal() throws HiveSQLException {
       String schemaPattern = convertSchemaPattern(schemaName);
       for (String dbName : metastoreClient.getDatabases(schemaPattern)) {
         rowSet.addRow(new Object[] {dbName, DEFAULT_HIVE_CATALOG});
+        if (LOG.isDebugEnabled()) {
+          String debugMessage = getDebugMessage("schema", RESULT_SET_SCHEMA);
+          LOG.debug(debugMessage, dbName, DEFAULT_HIVE_CATALOG);
+        }
+      }
+      if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+        LOG.debug("No schema metadata has been returned.");
       }
       setState(OperationState.FINISHED);
+      LOG.info("Fetching schema metadata has been successfully finished");
     } catch (Exception e) {
       setState(OperationState.ERROR);
       throw new HiveSQLException(e);
     }
   }
-
   /* (non-Javadoc)
    * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
    */
diff --git service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
index 59cfbb2..b5dac4b 100644
--- service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
+++ service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
@@ -32,6 +32,8 @@
 import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * GetTableTypesOperation.
@@ -39,6 +41,8 @@
  */
 public class GetTableTypesOperation extends MetadataOperation {
 
+  private static final Logger LOG = LoggerFactory.getLogger(GetTableTypesOperation.class.getName());
+
   protected static TableSchema RESULT_SET_SCHEMA = new TableSchema()
       .addStringColumn("TABLE_TYPE", "Table type name.");
 
@@ -51,19 +55,30 @@ protected GetTableTypesOperation(HiveSession parentSession) {
       getParentSession().getHiveConf().getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
     tableTypeMapping = TableTypeMappingFactory.getTableTypeMapping(tableMappingStr);
     rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+    LOG.info("Starting GetTableTypesOperation");
   }
 
   @Override
   public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
+    LOG.info("Fetching table type metadata");
     if (isAuthV2Enabled()) {
       authorizeMetaGets(HiveOperationType.GET_TABLETYPES, null);
     }
     try {
       for (TableType type : TableType.values()) {
-        rowSet.addRow(new String[] {tableTypeMapping.mapToClientType(type.toString())});
+        String tableType = tableTypeMapping.mapToClientType(type.toString());
+        rowSet.addRow(new String[] {tableType});
+        if (LOG.isDebugEnabled()) {
+          String debugMessage = getDebugMessage("table type", RESULT_SET_SCHEMA);
+          LOG.debug(debugMessage, tableType);
+        }
+      }
+      if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+        LOG.debug("No table type metadata has been returned.");
       }
       setState(OperationState.FINISHED);
+      LOG.info("Fetching table type metadata has been successfully finished");
     } catch (Exception e) {
       setState(OperationState.ERROR);
       throw new HiveSQLException(e);
diff --git service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
index c9233d0..1b5b09a 100644
--- service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
+++ service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
@@ -36,6 +36,8 @@
 import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * GetTablesOperation.
@@ -43,6 +45,8 @@
  */
 public class GetTablesOperation extends MetadataOperation {
 
+  private static final Logger LOG = LoggerFactory.getLogger(GetTablesOperation.class.getName());
+
   private final String catalogName;
   private final String schemaName;
   private final String tableName;
@@ -85,11 +89,16 @@ protected GetTablesOperation(HiveSession parentSession,
       tableTypeList = null;
     }
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+    LOG.info("Starting GetTablesOperation with the following parameters: "
+        + "catalogName={}, schemaName={}, tableName={}, tableTypes={}",
+        catalogName, schemaName, tableName,
+        tableTypeList != null ? tableTypeList.toString() : "null");
   }
 
   @Override
   public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
+    LOG.info("Fetching table metadata");
     try {
       IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
       String schemaPattern = convertSchemaPattern(schemaName);
@@ -104,16 +113,27 @@ public void runInternal() throws HiveSQLException {
 
       for (TableMeta tableMeta :
           metastoreClient.getTableMeta(schemaPattern, tablePattern, tableTypeList)) {
+        String tableType = tableTypeMapping.mapToClientType(tableMeta.getTableType());
         rowSet.addRow(new Object[] {
             DEFAULT_HIVE_CATALOG,
             tableMeta.getDbName(),
             tableMeta.getTableName(),
-            tableTypeMapping.mapToClientType(tableMeta.getTableType()),
+            tableType,
             tableMeta.getComments(),
             null, null, null, null, null
         });
+
+        if (LOG.isDebugEnabled()) {
+          String debugMessage = getDebugMessage("table", RESULT_SET_SCHEMA);
+          LOG.debug(debugMessage, DEFAULT_HIVE_CATALOG, tableMeta.getDbName(),
+              tableMeta.getTableName(), tableType, tableMeta.getComments());
+        }
+      }
+      if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+        LOG.debug("No table metadata has been returned.");
       }
       setState(OperationState.FINISHED);
+      LOG.info("Fetching table metadata has been successfully finished");
     } catch (Exception e) {
       setState(OperationState.ERROR);
       throw new HiveSQLException(e);
diff --git service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
index ac078b4..e3d26e4 100644
--- service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
+++ service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
@@ -31,6 +31,8 @@
 import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * GetTypeInfoOperation.
@@ -38,6 +40,8 @@
  */
 public class GetTypeInfoOperation extends MetadataOperation {
 
+  private static final Logger LOG = LoggerFactory.getLogger(GetTypeInfoOperation.class.getName());
+
   private final static TableSchema RESULT_SET_SCHEMA = new TableSchema()
       .addPrimitiveColumn("TYPE_NAME", Type.STRING_TYPE,
           "Type name")
@@ -81,11 +85,13 @@ protected GetTypeInfoOperation(HiveSession parentSession) {
     super(parentSession, OperationType.GET_TYPE_INFO);
     rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+    LOG.info("Starting GetTypeInfoOperation");
   }
 
   @Override
   public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
+    LOG.info("Fetching type info metadata");
     if (isAuthV2Enabled()) {
       authorizeMetaGets(HiveOperationType.GET_TYPEINFO, null);
     }
@@ -112,15 +118,22 @@ public void runInternal() throws HiveSQLException {
           type.getNumPrecRadix() //NUM_PREC_RADIX
         };
         rowSet.addRow(rowData);
+        if (LOG.isDebugEnabled()) {
+          String debugMessage = getDebugMessage("type info", RESULT_SET_SCHEMA);
+          LOG.debug(debugMessage, rowData);
+        }
+      }
+      if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+        LOG.debug("No type info metadata has been returned.");
       }
       setState(OperationState.FINISHED);
+      LOG.info("Fetching type info metadata has been successfully finished");
     } catch (Exception e) {
       setState(OperationState.ERROR);
       throw new HiveSQLException(e);
     }
   }
-
   /* (non-Javadoc)
    * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
    */
diff --git service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
index bf7c021..3be21b5 100644
--- service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
+++ service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.service.cli.ColumnDescriptor;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.OperationState;
 import org.apache.hive.service.cli.OperationType;
@@ -151,4 +152,20 @@ public void cancel(OperationState stateAfterCancel) throws HiveSQLException {
     throw new UnsupportedOperationException("MetadataOperation.cancel()");
   }
 
+  protected String getDebugMessage(final String type, final TableSchema resultSetSchema) {
+    StringBuilder debugMessage = new StringBuilder();
+    debugMessage.append("Returning ");
+    debugMessage.append(type);
+    debugMessage.append(" metadata: ");
+    boolean firstColumn = true;
+    for (ColumnDescriptor column : resultSetSchema.getColumnDescriptors()) {
+      if (!firstColumn) {
+        debugMessage.append(", ");
+      }
+      debugMessage.append(column.getName());
+      debugMessage.append("={}");
+      firstColumn = false;
+    }
+    return debugMessage.toString();
+  }
 }