Index: metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java =================================================================== --- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (revision 1135227) +++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (working copy) @@ -41,6 +41,7 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.QueryFilterType; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; @@ -1374,4 +1375,150 @@ List databases = synchronizedClient.getAllDatabases(); assertEquals(1, databases.size()); } + + public void testTableFilter() throws Exception { + try { + String dbName = "testTableFilter"; + String owner1 = "testOwner1"; + String owner2 = "testOwner2"; + int retention1 = 90; + int retention2 = 30; + String tableName1 = "table1"; + String tableName2 = "table2"; + String tableName3 = "table3"; + + client.dropTable(dbName, tableName1); + client.dropTable(dbName, tableName2); + client.dropTable(dbName, tableName3); + silentDropDatabase(dbName); + Database db = new Database(); + db.setName(dbName); + db.setDescription("Alter Partition Test database"); + client.createDatabase(db); + + Table table1 = createTableForTestFilter(dbName,tableName1, owner1, retention1, true); + Table table2 = createTableForTestFilter(dbName,tableName2, owner2, retention2, true); + Table table3 = createTableForTestFilter(dbName,tableName3, owner1, retention2, false); + + List tableNames; + //test owner + tableNames = client.listTableNamesByFilter(dbName, QueryFilterType.OWNER, + "owner like \".*Owner.*\" and owner like \"test.*\"", (short)-1); + assertEquals(tableNames.size(), 3); + 
assert(tableNames.contains(table1.getTableName())); + assert(tableNames.contains(table2.getTableName())); + assert(tableNames.contains(table3.getTableName())); + + tableNames = client.listTableNamesByFilter(dbName, QueryFilterType.OWNER, + "owner = \"testOwner1\"", (short)-1); + assertEquals(2, tableNames.size()); + assert(tableNames.contains(table1.getTableName())); + assert(tableNames.contains(table3.getTableName())); + + tableNames = client.listTableNamesByFilter(dbName, QueryFilterType.OWNER, + "owner = \"doesnotexist\"", (short)-1); + assertEquals(tableNames.size(), 0); + + + //test retention + tableNames = client.listTableNamesByFilter(dbName, QueryFilterType.RETENTION, + "retention = 30", (short)-1); + assertEquals(2, tableNames.size()); + assert(tableNames.contains(table2.getTableName())); + assert(tableNames.contains(table3.getTableName())); + + tableNames = client.listTableNamesByFilter(dbName, QueryFilterType.RETENTION, + "retention = 0 and retention = 10", (short)-1); + assertEquals(tableNames.size(), 0); + + for (Map.Entry e: table1.getParameters().entrySet()) { + System.out.println("table key: " + e.getKey() + " val: " + e.getValue()); + } + + //test params + tableNames = client.listTableNamesByFilter(dbName, QueryFilterType.PARAMS, + "test_param_2 = \"50\"", (short)-1); + assertEquals(2, tableNames.size()); + assert(tableNames.contains(table1.getTableName())); + assert(tableNames.contains(table2.getTableName())); + + tableNames = client.listTableNamesByFilter(dbName, QueryFilterType.PARAMS, + "test_param_2 = \"75\"", (short)-1); + assertEquals(0, tableNames.size()); + + tableNames = client.listTableNamesByFilter(dbName, QueryFilterType.PARAMS, + "key_dne = \"50\"", (short)-1); + assertEquals(0, tableNames.size()); + + tableNames = client.listTableNamesByFilter(dbName, QueryFilterType.PARAMS, + "test_param_1 <> \"yellow\"", (short)-1); + assertEquals(3, tableNames.size()); + + assert(tableNames.contains(table1.getTableName())); + 
assert(tableNames.contains(table2.getTableName())); + assert(tableNames.contains(table3.getTableName())); + + + client.dropTable(dbName, tableName1); + client.dropTable(dbName, tableName2); + client.dropTable(dbName, tableName3); + client.dropDatabase(dbName); + } catch (Exception e) { + System.err.println(StringUtils.stringifyException(e)); + System.err.println("testTableFilter() failed."); + throw e; + } + } + + private Table createTableForTestFilter(String dbName, String tableName, String owner, int retention, boolean hasSecondParam) throws Exception { + client.dropTable(dbName, tableName); + + ArrayList cols = new ArrayList(2); + cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, "")); + cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, "")); + + Table tbl = new Table(); + tbl.setDbName(dbName); + tbl.setTableName(tableName); + tbl.setParameters(new HashMap()); + tbl.getParameters().put("test_param_1", "hi"); + if (hasSecondParam) { + tbl.getParameters().put("test_param_2", "50"); + } + StorageDescriptor sd = new StorageDescriptor(); + tbl.setSd(sd); + sd.setCols(cols); + sd.setCompressed(false); + sd.setNumBuckets(1); + sd.setParameters(new HashMap()); + sd.getParameters().put("sd_param_1", "Use this for comments etc"); + sd.setBucketCols(new ArrayList(2)); + sd.getBucketCols().add("name"); + sd.setSerdeInfo(new SerDeInfo()); + sd.getSerdeInfo().setName(tbl.getTableName()); + sd.getSerdeInfo().setParameters(new HashMap()); + sd.getSerdeInfo().getParameters() + .put(Constants.SERIALIZATION_FORMAT, "1"); + sd.setSortCols(new ArrayList()); + + tbl.setOwner(owner); + tbl.setRetention(retention); + + tbl.setPartitionKeys(new ArrayList(2)); + tbl.getPartitionKeys().add( + new FieldSchema("ds", Constants.STRING_TYPE_NAME, "")); + tbl.getPartitionKeys().add( + new FieldSchema("hr", Constants.INT_TYPE_NAME, "")); + + client.createTable(tbl); + + if (isThriftClient) { + // the createTable() above does not update the location in the 'tbl' + // object 
when the client is a thrift client and the code below relies + // on the location being present in the 'tbl' object - so get the table + // from the metastore + tbl = client.getTable(dbName, tableName); + } + return tbl; + } } Index: metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (revision 1135227) +++ metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (working copy) @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.QueryFilterType; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.Type; @@ -130,6 +131,23 @@ public List getAllTables(String dbName) throws MetaException; + /** + * Gets a list of tables based on a filter string and filter type. 
+ * @param dbName + * The name of the database from which you will retrieve the table names + * @param filterType + * The type of filter + * @param filter + * The filter string + * @param max_tables + * The maximum number of tables returned + * @return A list of table names that match the desired filter + * @throws MetaException + * @throws UnknownDBException + */ + public abstract List listTableNamesByFilter(String dbName, QueryFilterType filterType, + String filter, short max_tables) throws MetaException, UnknownDBException; + public abstract List listPartitionNames(String db_name, String tbl_name, short max_parts) throws MetaException; Index: metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (revision 1135227) +++ metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (working copy) @@ -36,6 +36,7 @@ import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.QueryFilterType; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.UnknownDBException; @@ -93,7 +94,38 @@ public List getAllTables(String dbName) throws MetaException, TException, UnknownDBException; + /** + * Get a list of table names that match a filter and filter type. + * The filter operators are LIKE, <, <=, >, >=, =, <> + * The currently supported filter types are: + * QueryFilterType.OWNER: + * filter based on the owner of the table. All operators are supported. + * e.g., filter = "owner like ".*test_user.*" + * QueryFilterType.RETENTION: + * filter based on the retention time of the table. LIKE is not supported, + * because retention is an integer type. 
+ * e.g., filter = "retention < 90 and retention > 30" + * QueryFilterType.PARAMS: + * filter based on the parameter key/values of the table. Only = and <> is supported here. + * Note that the values here have to be strings. Also, = and <> only work for keys that exist + * in the tables. E.g., if you are looking for tables where key1 <> value, it will only + * look at tables that have a value for the parameter key1. + * e.g., filter = "numPartitions = \"2\" and retention_days = \"30\"" + * @param dbName + * The name of the database from which you will retrieve the table names + * @param filterType + * The type of filter + * @param filter + * The filter string + * @param max_tables + * The maximum number of tables returned + * @return A list of table names that match the desired filter + */ + public List listTableNamesByFilter(String dbName, QueryFilterType filterType, + String filter, short maxTables) + throws MetaException, TException, InvalidOperationException, UnknownDBException; + /** * Drop the table. * Index: metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java (revision 1135227) +++ metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java (working copy) @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.QueryFilterType; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.serde.Constants; @@ -103,13 +104,23 @@ this.rhs = rhs; } - public String generateJDOFilter(Table table, Map params) + /** + * Generates a JDO filter statement + * @param table + * The table for the filter. table can be null for filter types + * that span multiple tables (e.g., 
those other than PARTITIONS) + * @param params + * A map of parameter key to values for the filter statement + * @return a JDO filter statement + * @throws MetaException + */ + public String generateJDOFilter(QueryFilterType filterType, Table table, Map params) throws MetaException { StringBuilder filterBuffer = new StringBuilder(); if ( lhs != null) { filterBuffer.append (" ("); - filterBuffer.append(lhs.generateJDOFilter(table, params)); + filterBuffer.append(lhs.generateJDOFilter(filterType, table, params)); if (rhs != null) { if( andOr == LogicalOperator.AND ) { @@ -118,14 +129,13 @@ filterBuffer.append(" || "); } - filterBuffer.append(rhs.generateJDOFilter(table, params)); + filterBuffer.append(rhs.generateJDOFilter(filterType, table, params)); } filterBuffer.append (") "); } return filterBuffer.toString(); } - } /** @@ -134,12 +144,90 @@ public static class LeafNode extends TreeNode { public String keyName; public Operator operator; - public String value; + public Object value; public boolean isReverseOrder = false; private static final String PARAM_PREFIX = "hive_filter_param_"; @Override - public String generateJDOFilter(Table table, Map params) + public String generateJDOFilter(QueryFilterType filterType, Table table, + Map params) + throws MetaException { + switch (filterType) { + case PARTITIONS: + return generateJDOFilterOverPartitions(table, params); + case OWNER: + return generateJDOFilterOwner(params); + case RETENTION: + return generateJDOFilterRetention(params); + case PARAMS: + return generateJDOFilterParams(params); + default: + throw new MetaException("This QueryFilterType is unsupported for creating a filter"); + } + } + + private String generateJDOFilterRetention(Map params) + throws MetaException { + keyName = "this.retention"; + if (operator == Operator.LIKE) { + throw new MetaException("The LIKE operator is unsupported for filter type RETENTION"); + } + return generateJDOFilterGeneral(params); + } + + private String 
generateJDOFilterOwner(Map params) + throws MetaException { + keyName = "this.owner"; + return generateJDOFilterGeneral(params); + } + + /** + * can only support "=" for now, because of JDO-174, which is not in the current + * version of our lib. + */ + private String generateJDOFilterParams(Map params) + throws MetaException { + if (!(operator == Operator.EQUALS || operator == Operator.NOTEQUALS)) { + throw new MetaException("Only = and <> are supported opreators for the filter type PARAMS"); + } + keyName = "this.parameters.get(\"" + keyName + "\")"; + //value is persisted as a string in the db + value = value.toString(); + return generateJDOFilterGeneral(params); + } + + /** + * Generates a general filter. Given a map of , generates a statement of the form: + * key1 operator value2 (&& | || ) key2 operator value2 ... + * + * Currently supported types for value are String and Integer. + * The LIKE operator for Integers is unsupported. + */ + private String generateJDOFilterGeneral(Map params) + throws MetaException { + String paramName = PARAM_PREFIX + params.size(); + params.put(paramName, value); + String filter; + + if (isReverseOrder) { + if (operator == Operator.LIKE) { + throw new MetaException( + "Value should be on the RHS for LIKE operator : " + + "Key <" + keyName + ">"); + } else { + filter = paramName + " " + operator.getJdoOp() + " " + keyName; + } + } else { + if (operator == Operator.LIKE) { + filter = " " + keyName + "." + operator.getJdoOp() + "(" + paramName + ") "; + } else { + filter = " " + keyName + " " + operator.getJdoOp() + " " + paramName; + } + } + return filter; + } + + private String generateJDOFilterOverPartitions(Table table, Map params) throws MetaException { int partitionColumnCount = table.getPartitionKeys().size(); @@ -159,14 +247,22 @@ "> is not a partitioning key for the table"); } + //Can only support partitions whose types are string if( ! table.getPartitionKeys().get(partitionColumnIndex). 
getType().equals(Constants.STRING_TYPE_NAME) ) { throw new MetaException ("Filtering is supported only on partition keys of type string"); } + String valueParam = null; + try { + valueParam = (String) value; + } catch (ClassCastException e) { + throw new MetaException("Filtering is supported only on partition keys of type string"); + } + String paramName = PARAM_PREFIX + params.size(); - params.put(paramName, value); + params.put(paramName, valueParam); String filter; String keyEqual = FileUtils.escapePathName(keyName) + "="; @@ -187,9 +283,8 @@ throw new MetaException( "Value should be on the RHS for LIKE operator : " + "Key <" + keyName + ">"); - } - else if (operator == Operator.EQUALS) { - filter = makeFilterForEquals(keyName, value, paramName, params, + } else if (operator == Operator.EQUALS) { + filter = makeFilterForEquals(keyName, valueParam, paramName, params, partitionColumnIndex, partitionColumnCount); } else { filter = paramName + @@ -201,7 +296,7 @@ filter = " " + valString + "." 
+ operator.getJdoOp() + "(" + paramName + ") "; } else if (operator == Operator.EQUALS) { - filter = makeFilterForEquals(keyName, value, paramName, params, + filter = makeFilterForEquals(keyName, valueParam, paramName, params, partitionColumnIndex, partitionColumnCount); } else { filter = " " + valString + " " @@ -231,7 +326,7 @@ * @throws MetaException */ private static String makeFilterForEquals(String keyName, String value, - String paramName, Map params, int keyPos, int keyCount) + String paramName, Map params, int keyPos, int keyCount) throws MetaException { Map partKeyToVal = new HashMap(); partKeyToVal.put(keyName, value); @@ -307,13 +402,13 @@ * @return the string representation of the expression tree * @throws MetaException */ - public String generateJDOFilter(Table table, - Map params) throws MetaException { + public String generateJDOFilter(QueryFilterType filterType, Table table, + Map params) throws MetaException { if( root == null ) { return ""; } - return root.generateJDOFilter(table, params); + return root.generateJDOFilter(filterType, table, params); } /** Case insensitive ANTLR string stream */ Index: metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g (revision 1135227) +++ metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g (working copy) @@ -63,17 +63,26 @@ operatorExpression @init { boolean isReverseOrder = false; + Object val = null; } : ( - (key = Identifier op = operator value = StringLiteral) - | - (value = StringLiteral op = operator key = Identifier) { isReverseOrder = true; } + ( + (key = Identifier op = operator value = StringLiteral) + | + (value = StringLiteral op = operator key = Identifier) { isReverseOrder = true; } + ) { val = TrimQuotes(value.getText()); } + | + ( + (key = Identifier op = operator value = IntLiteral) + | + (value = IntLiteral op = operator key 
= Identifier) { isReverseOrder = true; } + ) { val = Integer.parseInt(value.getText()); } ) { LeafNode node = new LeafNode(); node.keyName = key.getText(); - node.value = TrimQuotes(value.getText()); + node.value = val; node.operator = op; node.isReverseOrder = isReverseOrder; @@ -121,10 +130,16 @@ ) ; + +IntLiteral + : + (Digit)+ + ; + Identifier : (Letter | Digit) (Letter | Digit | '_')* ; WS : (' '|'\r'|'\t'|'\n')+ { skip(); } ; - + \ No newline at end of file Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (revision 1135227) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (working copy) @@ -51,6 +51,7 @@ import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.QueryFilterType; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore; @@ -694,6 +695,13 @@ return deepCopyTables(client.get_table_objects_by_name(dbName, tableNames)); } + /** {@inheritDoc} */ + public List listTableNamesByFilter(String dbName, QueryFilterType filterType, + String filter, short maxTables) + throws MetaException, TException, InvalidOperationException, UnknownDBException { + return client.get_table_names_by_filter(dbName, filterType, filter, maxTables); + } + /** * @param name * @return the type Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (revision 1135227) +++ 
metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (working copy) @@ -61,6 +61,7 @@ import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeBag; import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; +import org.apache.hadoop.hive.metastore.api.QueryFilterType; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore; @@ -1187,6 +1188,40 @@ return tables; } + @Override + public List get_table_names_by_filter(final String dbName, final QueryFilterType filterType, final String filter, + final short maxTables) throws MetaException, InvalidOperationException, UnknownDBException { + List tables = new ArrayList(); + startFunction("get_table_names_by_filter", ": db = " + dbName + ", filter = " + filter); + try { + tables = executeWithRetry(new Command>() { + @Override + public List run(RawStore ms) throws Exception { + if (dbName == null || dbName.isEmpty()) { + throw new UnknownDBException("DB name is null or empty"); + } + if (filter == null) + { + throw new InvalidOperationException(filter + " cannot apply null filter"); + } + List tables = ms.listTableNamesByFilter(dbName, filterType, filter, maxTables); + return tables; + } + }); + } catch (MetaException e) { + throw e; + } catch (InvalidOperationException e) { + throw e; + } catch (UnknownDBException e) { + throw e; + } catch (Exception e) { + throw new MetaException(e.toString()); + } finally { + endFunction("get_table_names_by_filter"); + } + return tables; + } + public boolean set_table_parameters(String dbname, String name, Map params) throws NoSuchObjectException, MetaException { endFunction(startTableFunction("set_table_parameters", dbname, name)); Index: metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java =================================================================== --- 
metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (revision 1135227) +++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (working copy) @@ -66,6 +66,7 @@ import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeBag; import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; +import org.apache.hadoop.hive.metastore.api.QueryFilterType; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; @@ -1361,8 +1362,25 @@ return parts; } + private FilterParser getFilterParser(String filter) throws MetaException { + CharStream cs = new ANTLRNoCaseStringStream(filter); + FilterLexer lexer = new FilterLexer(cs); + + CommonTokenStream tokens = new CommonTokenStream(); + tokens.setTokenSource (lexer); + + FilterParser parser = new FilterParser(tokens); + + try { + parser.filter(); + } catch(RecognitionException re) { + throw new MetaException("Error parsing partition filter : " + re); + } + return parser; + } + private String makeQueryFilterString(MTable mtable, String filter, - Map params) + Map params) throws MetaException { StringBuilder queryBuilder = new StringBuilder( "table.tableName == t1 && table.database.name == t2"); @@ -1371,21 +1389,27 @@ Table table = convertToTable(mtable); - CharStream cs = new ANTLRNoCaseStringStream(filter); - FilterLexer lexer = new FilterLexer(cs); + FilterParser parser = getFilterParser(filter); + String jdoFilter = parser.tree.generateJDOFilter(QueryFilterType.PARTITIONS, table, params); + LOG.debug("jdoFilter = " + jdoFilter); - CommonTokenStream tokens = new CommonTokenStream(); - tokens.setTokenSource (lexer); - - FilterParser parser = new FilterParser(tokens); - - try { - parser.filter(); - } catch(RecognitionException re) { - throw new MetaException("Error parsing partition filter : " + re); + if( jdoFilter.trim().length() > 0 ) 
{ + queryBuilder.append(" && ( "); + queryBuilder.append(jdoFilter.trim()); + queryBuilder.append(" )"); } + } + return queryBuilder.toString(); + } - String jdoFilter = parser.tree.generateJDOFilter(table, params); + private String makeTableQueryFilterString(QueryFilterType filterType, String filter, + Map params, short maxParts) + throws MetaException { + StringBuilder queryBuilder = new StringBuilder("database.name == dbName"); + if (filter != null && filter.length() > 0) { + LOG.debug("making table query filter from filter: " + filter); + FilterParser parser = getFilterParser(filter); + String jdoFilter = parser.tree.generateJDOFilter(filterType, null, params); LOG.debug("jdoFilter = " + jdoFilter); if( jdoFilter.trim().length() > 0 ) { @@ -1400,12 +1424,26 @@ private String makeParameterDeclarationString(Map params) { //Create the parameter declaration string StringBuilder paramDecl = new StringBuilder(); - for(String key : params.keySet() ) { - paramDecl.append(", java.lang.String " + key); + for (String key : params.keySet()) { + paramDecl.append(", java.lang.String "); + paramDecl.append(key); } return paramDecl.toString(); } + private String makeParameterDeclarationStringObj(Map params) { + //Create the parameter declaration string + StringBuilder paramDecl = new StringBuilder(); + for (Entry entry : params.entrySet()) { + paramDecl.append(", "); + paramDecl.append(entry.getValue().getClass().getName()); + paramDecl.append(" "); + paramDecl.append(entry.getKey()); + } + LOG.debug("parameter declaration string: " + paramDecl.toString()); + return paramDecl.toString(); + } + private List listMPartitionsByFilter(String dbName, String tableName, String filter, short maxParts) throws MetaException, NoSuchObjectException{ boolean success = false; @@ -1421,7 +1459,7 @@ throw new NoSuchObjectException("Specified database/table does not exist : " + dbName + "." 
+ tableName); } - Map params = new HashMap(); + Map params = new HashMap(); String queryFilterString = makeQueryFilterString(mtable, filter, params); @@ -1439,7 +1477,7 @@ params.put("t1", tableName.trim()); params.put("t2", dbName.trim()); - String parameterDeclaration = makeParameterDeclarationString(params); + String parameterDeclaration = makeParameterDeclarationStringObj(params); query.declareParameters(parameterDeclaration); query.setOrdering("partitionName ascending"); @@ -1458,6 +1496,51 @@ } @Override + public List listTableNamesByFilter(String dbName, QueryFilterType filterType, + String filter, short maxTables) + throws MetaException { + boolean success = false; + List tableNames = new ArrayList(); + try { + openTransaction(); + LOG.debug("Executing listTableNamesByFilter"); + dbName = dbName.toLowerCase().trim(); + Map params = new HashMap(); + String queryFilterString = makeTableQueryFilterString(filterType, filter, params, maxTables); + Query query = pm.newQuery(MTable.class); + query.declareImports("import java.lang.String"); + query.setResult("tableName"); + query.setResultClass(java.lang.String.class); + if (maxTables >= 0) { + query.setRange(0, maxTables); + } + LOG.debug("filter specified is " + filter + "," + " JDOQL filter is " + queryFilterString); + params.put("dbName", dbName); + for (Entry entry : params.entrySet()) { + LOG.debug("key: " + entry.getKey() + " value: " + entry.getValue() + + " class: " + entry.getValue().getClass().getName()); + } + String paramterDeclaration = makeParameterDeclarationStringObj(params); + query.declareParameters(paramterDeclaration); + query.setFilter(queryFilterString); + Collection names = (Collection) query.executeWithMap(params); + tableNames = new ArrayList(); + for (Iterator i = names.iterator(); i.hasNext();) { + tableNames.add((String) i.next()); + } + LOG.debug("Done executing query for listTableNamesByFilter"); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for 
listTableNamesByFilter"); + + } finally { + if (!success) { + rollbackTransaction(); + } + } + return tableNames; + } + + @Override public List listPartitionNamesByFilter(String dbName, String tableName, String filter, short maxParts) throws MetaException { boolean success = false; @@ -1474,7 +1557,7 @@ // table or db does not exist, we return an empty list return partNames; } - Map params = new HashMap(); + Map params = new HashMap(); String queryFilterString = makeQueryFilterString(mtable, filter, params); @@ -1494,7 +1577,7 @@ params.put("t1", tableName.trim()); params.put("t2", dbName.trim()); - String parameterDeclaration = makeParameterDeclarationString(params); + String parameterDeclaration = makeParameterDeclarationStringObj(params); query.declareParameters(parameterDeclaration); query.setOrdering("partitionName ascending"); query.setResult("partitionName"); Index: metastore/if/hive_metastore.thrift =================================================================== --- metastore/if/hive_metastore.thrift (revision 1135227) +++ metastore/if/hive_metastore.thrift (working copy) @@ -43,6 +43,15 @@ GROUP = 3, } +enum QueryFilterType { + OWNER = 1, + RETENTION = 2, + PARAMS = 3, + PARTITIONS = 4, +} + +const string HIVE_FILTER_LAST_ACCESS_TIME = "lastAccessTime" + struct HiveObjectRef{ 1: HiveObjectType objectType, 2: string dbName, @@ -248,6 +257,25 @@ throws (1:MetaException o1, 2:NoSuchObjectException o2) list get_table_objects_by_name(1:string dbname, 2:list tbl_names) throws (1:MetaException o1, 2:InvalidOperationException o2, 3:UnknownDBException o3) + + // Get a list of table names that match a filter and filter type. + // The filter operators are LIKE, <, <=, >, >=, =, <> + // The currently supported filter types are: + // QueryFilterType.OWNER: + // filter based on the owner of the table. All operators are supported. + // e.g., filter = "owner like ".*test_user.*" + // QueryFilterType.RETENTION: + // filter based on the retention time of the table. 
LIKE is not supported, + // because retention is an integer type. + // e.g., filter = "retention < 90 and retention > 30" + // QueryFilterType.PARAMS: + // filter based on the parameter key/values of the table. Only = and <> is supported here. + // Note that the values here have to be strings. Also, = and <> only work for keys that exist + // in the tables. E.g., if you are looking for tables where key1 <> value, it will only + // look at tables that have a value for the parameter key1. + // e.g., filter = "numPartitions = \"2\" and retention_days = \"30\"" + list get_table_names_by_filter(1:string dbname, 2:QueryFilterType filterType, 3:string filter, 4:i16 max_tables=-1) + throws (1:MetaException o1, 2:InvalidOperationException o2, 3:UnknownDBException o3) // alter table applies to only future partitions not for existing partitions // * See notes on DDL_TIME