Index: src/java/org/apache/hadoop/hbase/rest/TableHandler.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/TableHandler.java (revision 696962)
+++ src/java/org/apache/hadoop/hbase/rest/TableHandler.java (working copy)
@@ -239,10 +239,22 @@
   }
 
   private void deleteTable(HttpServletRequest request,
-    HttpServletResponse response, String[] pathSegments) throws IOException {
-    String tableName = pathSegments[0];
-    admin.deleteTable(tableName);
-    response.setStatus(202);
+    HttpServletResponse response, String[] pathSegments)
+  throws ServletException {
+    try {
+      String tableName = pathSegments[0];
+      String[] column_params = request.getParameterValues(COLUMN);
+      if (column_params != null && column_params.length > 0) {
+        for (String column : column_params) {
+          admin.deleteColumn(tableName, makeColumnName(column));
+        }
+      } else {
+        admin.deleteTable(tableName);
+      }
+      response.setStatus(202);
+    } catch (Exception e) {
+      throw new ServletException(e);
+    }
   }
 
   private void putTableXml(HttpServletRequest
@@ -303,12 +315,18 @@
 
     try {
       String tableName = pathSegments[0];
+      HTableDescriptor htd = admin.getTableDescriptor(tableName);
+
+      NodeList columnfamily_nodes = doc.getElementsByTagName("columnfamily");
-      NodeList columnfamily_nodes = doc.getElementsByTagName("columnfamily");
       for (int i = 0; i < columnfamily_nodes.getLength(); i++) {
         Element columnfamily = (Element) columnfamily_nodes.item(i);
         HColumnDescriptor hcd = putColumnFamilyXml(columnfamily);
-        admin.modifyColumn(tableName, hcd.getNameAsString(), hcd);
+        if (htd.hasFamily(Bytes.toBytes(hcd.getNameAsString()))) {
+          admin.modifyColumn(tableName, hcd.getNameAsString(), hcd);
+        } else {
+          admin.addColumn(tableName, hcd);
+        }
       }
     } catch (Exception e) {
       throw new ServletException(e);
@@ -317,12 +335,8 @@
 
   private HColumnDescriptor putColumnFamilyXml(Element columnfamily) {
     Node name_node = columnfamily.getElementsByTagName("name").item(0);
-    String colname = name_node.getFirstChild().getNodeValue();
+    String colname = makeColumnName(name_node.getFirstChild().getNodeValue());
 
-    if (colname.indexOf(":") == -1) {
-      colname += ":";
-    }
-
     int max_versions = HColumnDescriptor.DEFAULT_VERSIONS;
     NodeList max_versions_list = columnfamily.getElementsByTagName("max-versions");
     if (max_versions_list.getLength() > 0) {
@@ -358,6 +372,7 @@
     if (bloomfilter_list.getLength() > 0) {
       bloomfilter = Boolean.valueOf(bloomfilter_list.item(0).getFirstChild().getNodeValue());
     }
-    return new HColumnDescriptor(Bytes.toBytes(colname), max_versions, compression, in_memory, block_cache, max_cell_size, ttl, bloomfilter);
+    return new HColumnDescriptor(Bytes.toBytes(colname), max_versions,
+      compression, in_memory, block_cache, max_cell_size, ttl, bloomfilter);
   }
 }
Index: src/java/org/apache/hadoop/hbase/rest/GenericHandler.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/GenericHandler.java (revision 696962)
+++ src/java/org/apache/hadoop/hbase/rest/GenericHandler.java (working copy)
@@ -279,4 +279,10 @@
   protected HTable getTable(final String tableName) throws IOException {
     return new HTable(this.conf, Bytes.toBytes(tableName));
   }
+
+  protected String makeColumnName(String column) {
+    if (column.indexOf(':') == -1)
+      column += ':';
+    return column;
+  }
 }
Index: src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
===================================================================
--- src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java (revision 696962)
+++ src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java (working copy)
@@ -117,6 +117,16 @@
     return this.connection.listTables();
   }
 
+  public HTableDescriptor getTableDescriptor(final String tableName)
+  throws IOException {
+    return getTableDescriptor(Bytes.toBytes(tableName));
+  }
+
+  public HTableDescriptor getTableDescriptor(final byte [] tableName)
+  throws IOException {
+    return this.connection.getHTableDescriptor(tableName);
+  }
+
   private long getPauseTime(int tries) {
     if (tries >= HConstants.RETRY_BACKOFF.length)
       tries = HConstants.RETRY_BACKOFF.length - 1;
Index: bin/HBase.rb
===================================================================
--- bin/HBase.rb (revision 696962)
+++ bin/HBase.rb (working copy)
@@ -30,6 +30,7 @@
   STARTROW = "STARTROW"
   ENDROW = STOPROW
   LIMIT = "LIMIT"
+  METHOD = "METHOD"
 
   # Wrapper for org.apache.hadoop.hbase.client.HBaseAdmin
   class Admin
@@ -123,11 +124,22 @@
     end
 
     def alter(tableName, args)
-      now = Time.now 
+      now = Time.now
       raise TypeError.new("Table name must be of type String") \
         unless tableName.instance_of? String
-      descriptor = hcd(args)
-      @admin.modifyColumn(tableName, descriptor.getNameAsString(), descriptor);
+      htd = @admin.getTableDescriptor(tableName.to_java_bytes)
+      method = args.delete(METHOD)
+      if method == "delete"
+        @admin.deleteColumn(tableName, makeColumnName(args[NAME]))
+      else
+        descriptor = hcd(args)
+        if (htd.hasFamily(descriptor.getNameAsString().to_java_bytes))
+          @admin.modifyColumn(tableName, descriptor.getNameAsString(),
+            descriptor);
+        else
+          @admin.addColumn(tableName, descriptor);
+        end
+      end
       @formatter.header()
       @formatter.footer(now)
 
Index: bin/hirb.rb
===================================================================
--- bin/hirb.rb (revision 696962)
+++ bin/hirb.rb (working copy)
@@ -108,12 +108,15 @@
 alter     Alter column family schema; pass table name and a dictionary
           specifying new column family schema. Dictionaries are described
           below in the GENERAL NOTES section. Dictionary must include name
-          of column family to alter. For example, to change the 'f1' column
-          family in table 't1' from defaults to instead keep a maximum of 5
-          cell VERSIONS, do:
-
+          of column family to alter. For example,
+
+          To change or add the 'f1' column family in table 't1' from defaults
+          to instead keep a maximum of 5 cell VERSIONS, do:
           hbase> alter 't1', {NAME => 'f1', VERSIONS => 5}
 
+          To delete the 'f1' column family in table 't1', do:
+          hbase> alter 't1', {NAME => 'f1', METHOD => 'delete'}
+
 count     Count the number of rows in a table. This operation may take a LONG
           time (Run '$HADOOP_HOME/bin/hadoop jar hbase.jar rowcount' to run a
           counting mapreduce job). Current count is shown every 1000 rows by
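
Notes (accompanying commentary, not part of the patch): with this change, a REST DELETE on a table resource drops only the named column families when one or more column query parameters are supplied (something like DELETE /t1?column=f1, assuming the handler's COLUMN constant maps to a "column" request parameter), and drops the whole table otherwise. Likewise, the shell's alter command now adds a family that is absent, modifies one that exists, and deletes one when METHOD => 'delete' is passed; depending on the HBase version, the table may need to be disabled first.

Below is a minimal client-side sketch of the same add-or-modify pattern, using the getTableDescriptor() overloads this patch adds to HBaseAdmin. Table "t1" and family "f1:" are placeholder names, and the snippet assumes a running master reachable through the default configuration.

// Sketch only; mirrors the hasFamily() branch the patched handler uses.
import java.io.IOException;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;

public class AlterFamilyExample {
  public static void main(String[] args) throws IOException {
    HBaseAdmin admin = new HBaseAdmin(new HBaseConfiguration());
    HTableDescriptor htd = admin.getTableDescriptor("t1");
    HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("f1:"));
    if (htd.hasFamily(Bytes.toBytes(hcd.getNameAsString()))) {
      // Family already exists: modify its schema in place.
      admin.modifyColumn("t1", hcd.getNameAsString(), hcd);
    } else {
      // Family is absent: add it to the table.
      admin.addColumn("t1", hcd);
    }
  }
}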