Index: src/java/org/apache/hadoop/hbase/rest/RowHandler.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/RowHandler.java	(revision 686941)
+++ src/java/org/apache/hadoop/hbase/rest/RowHandler.java	(working copy)
@@ -2,9 +2,9 @@
 
 import java.io.IOException;
 import java.net.URLDecoder;
-import java.util.HashSet;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.TreeMap;
 
 import javax.servlet.ServletException;
@@ -72,7 +72,7 @@
     final HttpServletResponse response, final String [] pathSegments)
   throws IOException {
     // pull the row key out of the path
-    String row = URLDecoder.decode(pathSegments[2], HConstants.UTF8_ENCODING);
+    byte[] row = Bytes.toBytes(URLDecoder.decode(pathSegments[2], HConstants.UTF8_ENCODING));
 
     String timestampStr = null;
     if (pathSegments.length == 4) {
@@ -85,64 +85,63 @@
       }
     }
 
-    String[] columns = request.getParameterValues(COLUMN);
-
-    if (columns == null || columns.length == 0) {
-      // They want full row returned.
-
-      // Presumption is that this.table has already been focused on target table.
-      Map<byte [], Cell> result = timestampStr == null ?
-        table.getRow(Bytes.toBytes(row))
-        : table.getRow(Bytes.toBytes(row), Long.parseLong(timestampStr));
-
+    String[] column_params = request.getParameterValues(COLUMN);
+
+    byte[][] columns = null;
+
+    if (column_params != null && column_params.length > 0) {
+      List<String> available_columns = new ArrayList<String>();
+      for (String column_param : column_params) {
+        if (column_param.length() > 0 && table.getTableDescriptor().hasFamily(Bytes.toBytes(column_param))) {
+          available_columns.add(column_param);
+        }
+      }
+      columns = Bytes.toByteArrays(available_columns.toArray(new String[0]));
+    }
+
+    String[] version_params = request.getParameterValues(VERSION);
+    int version = 0;
+    if (version_params != null && version_params.length == 1) {
+      version = Integer.parseInt(version_params[0]);
+    }
+
+    if (version > 0 && columns != null) {
+      Map<byte[], Cell[]> result = new TreeMap<byte[], Cell[]>(Bytes.BYTES_COMPARATOR);
+
+      for (byte[] col : columns) {
+        Cell[] cells = timestampStr == null ? table.get(row, col, version)
+          : table.get(row, col, Long.parseLong(timestampStr), version);
+        if (cells != null) {
+          result.put(col, cells);
+        }
+      }
+
       if (result == null || result.size() == 0) {
         doNotFound(response, "Row not found!");
       } else {
         switch (ContentType.getContentType(request.getHeader(ACCEPT))) {
         case XML:
-          outputRowXml(response, result);
+          outputRowWithMultiVersionsXml(response, result);
           break;
         case MIME:
         default:
-          doNotAcceptable(response, "Unsupported Accept Header Content: " +
-            request.getHeader(CONTENT_TYPE));
+          doNotAcceptable(response, "Unsupported Accept Header Content: " +
+            request.getHeader(CONTENT_TYPE));
         }
-      }
+      }
     } else {
-      Map<byte [], Cell> prefiltered_result = table.getRow(Bytes.toBytes(row));
-
-      if (prefiltered_result == null || prefiltered_result.size() == 0) {
+      Map<byte[], Cell> result = timestampStr == null ?
+        table.getRow(row, columns) : table.getRow(row, columns, Long.parseLong(timestampStr));
+      if (result == null || result.size() == 0) {
         doNotFound(response, "Row not found!");
       } else {
-        // create a Set from the columns requested so we can
-        // efficiently filter the actual found columns
-        Set<String> requested_columns_set = new HashSet<String>();
-        for(int i = 0; i < columns.length; i++){
-          requested_columns_set.add(columns[i]);
-        }
-
-        // output map that will contain the filtered results
-        Map<byte [], Cell> m =
-          new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
-
-        // get an array of all the columns retrieved
-        Set<byte []> columns_retrieved = prefiltered_result.keySet();
-
-        // copy over those cells with requested column names
-        for(byte [] current_column: columns_retrieved) {
-          if (requested_columns_set.contains(Bytes.toString(current_column))) {
-            m.put(current_column, prefiltered_result.get(current_column));
-          }
-        }
-
         switch (ContentType.getContentType(request.getHeader(ACCEPT))) {
-        case XML:
-          outputRowXml(response, m);
-          break;
-        case MIME:
-        default:
-          doNotAcceptable(response, "Unsupported Accept Header Content: " +
-            request.getHeader(CONTENT_TYPE));
+        case XML:
+          outputRowXml(response, result);
+          break;
+        case MIME:
+        default:
+          doNotAcceptable(response, "Unsupported Accept Header Content: " +
+            request.getHeader(CONTENT_TYPE));
         }
       }
     }
@@ -167,6 +166,18 @@
     outputter.getWriter().close();
   }
 
+  private void outputRowWithMultiVersionsXml(final HttpServletResponse response,
+    final Map<byte[], Cell[]> result)
+  throws IOException {
+    setResponseHeader(response, result.size() > 0? 200: 204,
+      ContentType.XML.toString());
+    XMLOutputter outputter = getXMLOutputter(response.getWriter());
+    outputter.startTag(ROW);
+    outputColumnsWithMultiVersionsXml(outputter, result);
+    outputter.endTag();
+    outputter.endDocument();
+    outputter.getWriter().close();
+  }
   /*
    * @param response
    * @param result
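For illustration, here is a minimal client-side sketch of how the new column and version query parameters handled above could be exercised once the patch is applied. It uses only the JDK's HttpURLConnection; the host, port, table name, row key, and column family are hypothetical placeholders, and "text/xml" is assumed to be the MIME string that ContentType.XML maps to.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class MultiVersionRowClient {
  public static void main(String[] args) throws Exception {
    // Hypothetical endpoint: table "mytable", row key "myrow", family "info:",
    // asking for up to 3 versions of each matching cell.
    URL url = new URL(
      "http://localhost:60050/mytable/row/myrow?column=info:&version=3");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    // RowHandler dispatches on the Accept header; XML is the only
    // content type supported for multi-version output.
    conn.setRequestProperty("Accept", "text/xml");
    BufferedReader in = new BufferedReader(
      new InputStreamReader(conn.getInputStream(), "UTF-8"));
    try {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line);
      }
    } finally {
      in.close();
      conn.disconnect();
    }
  }
}

Note that the patched RowHandler only honors version when at least one requested column names an existing family; with no valid columns the request falls through to the plain getRow() path and version is silently ignored.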
Index: src/java/org/apache/hadoop/hbase/rest/GenericHandler.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/GenericHandler.java	(revision 686941)
+++ src/java/org/apache/hadoop/hbase/rest/GenericHandler.java	(working copy)
@@ -55,6 +55,7 @@
   protected static final String CONTENT_TYPE = "content-type";
   protected static final String ROW = "row";
   protected static final String REGIONS = "regions";
+  protected static final String VERSION = "version";
 
   protected final Log LOG = LogFactory.getLog(this.getClass());
 
@@ -233,13 +234,32 @@
       outputter.startTag(COLUMN);
       doElement(outputter, "name",
         org.apache.hadoop.hbase.util.Base64.encodeBytes(e.getKey()));
-      // We don't know String from binary data so we always base64 encode.
-      doElement(outputter, "value",
-        org.apache.hadoop.hbase.util.Base64.encodeBytes(e.getValue().getValue()));
+      outputCellXml(outputter, e.getValue());
       outputter.endTag();
     }
   }
 
+  protected void outputColumnsWithMultiVersionsXml(final XMLOutputter outputter,
+    final Map<byte[], Cell[]> m)
+  throws IllegalStateException, IllegalArgumentException, IOException {
+    for (Map.Entry<byte[], Cell[]> e: m.entrySet()) {
+      for (Cell c : e.getValue()) {
+        outputter.startTag(COLUMN);
+        doElement(outputter, "name",
+          org.apache.hadoop.hbase.util.Base64.encodeBytes(e.getKey()));
+        outputCellXml(outputter, c);
+        outputter.endTag();
+      }
+    }
+  }
+
+  protected void outputCellXml(final XMLOutputter outputter, Cell c)
+  throws IllegalStateException, IllegalArgumentException, IOException {
+    // We don't know String from binary data so we always base64 encode.
+    doElement(outputter, "value",
+      org.apache.hadoop.hbase.util.Base64.encodeBytes(c.getValue()));
+    doElement(outputter, "timestamp", String.valueOf(c.getTimestamp()));
+  }
   // Commented - multipart support is currently nonexistant.
   //  protected void outputColumnsMime(final MultiPartResponse mpr,
   //    final Map m)
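To make the new response shape concrete, the following self-contained sketch (plain JDK only, with java.util.Base64 standing in for org.apache.hadoop.hbase.util.Base64 and a FakeCell stand-in for Cell) prints the document structure that outputRowWithMultiVersionsXml and outputCellXml produce: one <column> element per cell version, each carrying a base64-encoded name and value plus a <timestamp> element. The column name and cell values are made-up sample data.

import java.util.Arrays;
import java.util.Base64;
import java.util.List;

public class MultiVersionXmlShape {
  // Minimal stand-in for org.apache.hadoop.hbase.io.Cell: value bytes plus timestamp.
  static class FakeCell {
    final byte[] value;
    final long timestamp;
    FakeCell(byte[] value, long timestamp) {
      this.value = value;
      this.timestamp = timestamp;
    }
  }

  public static void main(String[] args) {
    byte[] column = "info:".getBytes();       // made-up column name
    List<FakeCell> versions = Arrays.asList(  // made-up cell versions, newest first
      new FakeCell("value-2".getBytes(), 1220000000000L),
      new FakeCell("value-1".getBytes(), 1219000000000L));

    StringBuilder xml = new StringBuilder("<row>");
    for (FakeCell c : versions) {
      // One <column> element per version, mirroring outputColumnsWithMultiVersionsXml.
      xml.append("<column>")
        .append("<name>").append(b64(column)).append("</name>")
        // outputCellXml emits the base64-encoded value, then the cell timestamp.
        .append("<value>").append(b64(c.value)).append("</value>")
        .append("<timestamp>").append(c.timestamp).append("</timestamp>")
        .append("</column>");
    }
    xml.append("</row>");
    System.out.println(xml);
  }

  static String b64(byte[] bytes) {
    return Base64.getEncoder().encodeToString(bytes);
  }
}

Since the single-version outputColumnsXml now also routes through outputCellXml, existing clients will start seeing the extra <timestamp> element too; that looks intentional, but it is a visible change to the current XML format.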