Index: NOTICE.txt
===================================================================
--- NOTICE.txt	(revision 932666)
+++ NOTICE.txt	(working copy)
@@ -4,37 +4,3 @@
 In addition, this product includes software developed by:
 Facebook, Inc. (http://developers.facebook.com/thrift/ -- Page includes the Thrift Software License)
-
-JUnit (http://www.junit.org/)
-
-The JSON jar source is here: http://www.json.org/java/index.html
-
-Michael Gottesman developed AgileJSON. Its source code is here:
-
-  http://github.com/gottesmm/agile-json-2.0/tree/master
-
-It has this license at the head of the each source file:
-
- * Permission is hereby granted, free of charge, to any person obtaining a
- * copy
- * of this software and associated documentation files (the "Software"), to
- * deal
- * in the Software without restriction, including without limitation the
- * rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all
- * copies or substantial portions of the Software.
- *
- * The Software shall be used for Good, not Evil.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
Index: lib/AgileJSON-2009-03-30.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream
Index: src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
===================================================================
--- src/java/org/apache/hadoop/hbase/HColumnDescriptor.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/HColumnDescriptor.java	(working copy)
@@ -36,8 +36,6 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
 
-import agilejson.TOJSON;
-
 /**
  * An HColumnDescriptor contains information about a column family such as the
  * number of versions, compression settings, etc.
@@ -331,7 +329,6 @@
   /**
    * @return Name of this column family with colon as required by client API
    */
-  @TOJSON(fieldName = "name", base64=true)
   public byte [] getNameWithColon() {
     return Bytes.add(this.name, new byte[]{':'});
   }
@@ -397,14 +394,12 @@
   }
 
   /** @return compression type being used for the column family */
-  @TOJSON
   public Compression.Algorithm getCompression() {
     String n = getValue(COMPRESSION);
     return Compression.Algorithm.valueOf(n.toUpperCase());
   }
 
   /** @return maximum number of versions */
-  @TOJSON
   public synchronized int getMaxVersions() {
     if (this.cachedMaxVersions == -1) {
       String value = getValue(HConstants.VERSIONS);
@@ -424,7 +419,6 @@
   /**
    * @return Blocksize.
    */
-  @TOJSON
   public synchronized int getBlocksize() {
     if (this.blocksize == null) {
       String value = getValue(BLOCKSIZE);
@@ -445,7 +439,6 @@
   /**
    * @return Compression type setting.
    */
-  @TOJSON
   public Compression.Algorithm getCompressionType() {
     return getCompression();
   }
@@ -470,7 +463,6 @@
   /**
    * @return True if we are to keep all in use HRegionServer cache.
    */
-  @TOJSON(prefixLength = 2)
   public boolean isInMemory() {
     String value = getValue(HConstants.IN_MEMORY);
     if (value != null)
@@ -489,7 +481,6 @@
   /**
    * @return Time-to-live of cell contents, in seconds.
    */
-  @TOJSON
   public int getTimeToLive() {
     String value = getValue(TTL);
     return (value != null)? Integer.valueOf(value).intValue(): DEFAULT_TTL;
@@ -505,7 +496,6 @@
   /**
    * @return True if MapFile blocks should be cached.
    */
-  @TOJSON(prefixLength = 2)
   public boolean isBlockCacheEnabled() {
     String value = getValue(BLOCKCACHE);
     if (value != null)
@@ -523,7 +513,6 @@
   /**
    * @return true if a bloom filter is enabled
    */
-  @TOJSON(prefixLength = 2)
   public boolean isBloomfilter() {
     String value = getValue(BLOOMFILTER);
     if (value != null)
Index: src/java/org/apache/hadoop/hbase/rest/TableModel.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/TableModel.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/rest/TableModel.java	(working copy)
@@ -37,8 +37,6 @@
 import org.apache.hadoop.hbase.rest.serializer.ISerializable;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import agilejson.TOJSON;
-
 public class TableModel extends AbstractModel {
 
   @SuppressWarnings("unused")
@@ -256,7 +254,6 @@
     /**
      * @return the regionKey
      */
-    @TOJSON(fieldName = "region")
     public byte[][] getRegionKey() {
       return regionKey;
     }
Index: src/java/org/apache/hadoop/hbase/rest/parser/JsonRestParser.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/parser/JsonRestParser.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/rest/parser/JsonRestParser.java	(working copy)
@@ -1,234 +0,0 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.rest.parser;
-
-import java.util.ArrayList;
-
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.rest.RESTConstants;
-import org.apache.hadoop.hbase.rest.descriptors.RowUpdateDescriptor;
-import org.apache.hadoop.hbase.rest.descriptors.ScannerDescriptor;
-import org.apache.hadoop.hbase.rest.exception.HBaseRestException;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-/**
- *
- */
-public class JsonRestParser implements IHBaseRestParser {
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.parser.IHBaseRestParser#getTableDescriptor
-   * (byte[])
-   */
-  public HTableDescriptor getTableDescriptor(byte[] input)
-      throws HBaseRestException {
-    try {
-      JSONObject o;
-      HTableDescriptor h;
-      JSONArray columnDescriptorArray;
-      o = new JSONObject(new String(input));
-      columnDescriptorArray = o.getJSONArray("column_families");
-      h = new HTableDescriptor(o.getString("name"));
-
-      for (int i = 0; i < columnDescriptorArray.length(); i++) {
-        JSONObject json_columnDescriptor = columnDescriptorArray
-            .getJSONObject(i);
-        h.addFamily(this.getColumnDescriptor(json_columnDescriptor));
-      }
-      return h;
-    } catch (Exception e) {
-      throw new HBaseRestException(e);
-    }
-  }
-
-  private HColumnDescriptor getColumnDescriptor(JSONObject jsonObject)
-      throws JSONException {
-    String strTemp;
-    strTemp = jsonObject.getString("name");
-    if (strTemp.charAt(strTemp.length() - 1) != ':') {
-      strTemp += ":";
-    }
-
-    byte[] name = Bytes.toBytes(strTemp);
-
-    int maxVersions;
-    String cType;
-    boolean inMemory;
-    boolean blockCacheEnabled;
-    int maxValueLength;
-    int timeToLive;
-    boolean bloomfilter;
-
-    try {
-      bloomfilter = jsonObject.getBoolean("bloomfilter");
-    } catch (JSONException e) {
-      bloomfilter = false;
-    }
-
-    try {
-      maxVersions = jsonObject.getInt("max_versions");
-    } catch (JSONException e) {
-      maxVersions = 3;
-    }
-
-    try {
-      cType = jsonObject.getString("compression_type").toUpperCase();
-    } catch (JSONException e) {
-      cType = HColumnDescriptor.DEFAULT_COMPRESSION;
-    }
-
-    try {
-      inMemory = jsonObject.getBoolean("in_memory");
-    } catch (JSONException e) {
-      inMemory = false;
-    }
-
-    try {
-      blockCacheEnabled = jsonObject.getBoolean("block_cache_enabled");
-    } catch (JSONException e) {
-      blockCacheEnabled = false;
-    }
-
-    try {
-      maxValueLength = jsonObject.getInt("max_value_length");
-    } catch (JSONException e) {
-      maxValueLength = 2147483647;
-    }
-
-    try {
-      timeToLive = jsonObject.getInt("time_to_live");
-    } catch (JSONException e) {
-      timeToLive = Integer.MAX_VALUE;
-    }
-
-    return new HColumnDescriptor(name, maxVersions, cType, inMemory,
-        blockCacheEnabled, maxValueLength, timeToLive, bloomfilter);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.parser.IHBaseRestParser#getColumnDescriptors
-   * (byte[])
-   */
-  public ArrayList<HColumnDescriptor> getColumnDescriptors(byte[] input)
-      throws HBaseRestException {
-    ArrayList<HColumnDescriptor> columns = new ArrayList<HColumnDescriptor>();
-    try {
-      JSONObject o;
-      JSONArray columnDescriptorArray;
-      o = new JSONObject(new String(input));
-      columnDescriptorArray = o.getJSONArray("column_families");
-
-      for (int i = 0; i < columnDescriptorArray.length(); i++) {
-        JSONObject json_columnDescriptor = columnDescriptorArray
-            .getJSONObject(i);
-        columns.add(this.getColumnDescriptor(json_columnDescriptor));
-      }
-    } catch (JSONException e) {
-      throw new
-          HBaseRestException("Error Parsing json input", e);
-    }
-
-    return columns;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.parser.IHBaseRestParser#getScannerDescriptor
-   * (byte[])
-   */
-  public ScannerDescriptor getScannerDescriptor(byte[] input)
-      throws HBaseRestException {
-    JSONObject scannerDescriptor;
-    JSONArray columnArray;
-
-    byte[][] columns = null;
-    long timestamp;
-    byte[] startRow;
-    byte[] stopRow;
-    String filters;
-
-    try {
-      scannerDescriptor = new JSONObject(new String(input));
-
-      columnArray = scannerDescriptor.optJSONArray(RESTConstants.COLUMNS);
-      timestamp = scannerDescriptor.optLong(RESTConstants.SCANNER_TIMESTAMP);
-      startRow = Bytes.toBytes(scannerDescriptor.optString(
-          RESTConstants.SCANNER_START_ROW, ""));
-      stopRow = Bytes.toBytes(scannerDescriptor.optString(
-          RESTConstants.SCANNER_STOP_ROW, ""));
-      filters = scannerDescriptor.optString(RESTConstants.SCANNER_FILTER);
-
-      if (columnArray != null) {
-        columns = new byte[columnArray.length()][];
-        for (int i = 0; i < columnArray.length(); i++) {
-          columns[i] = Bytes.toBytes(columnArray.optString(i));
-        }
-      }
-
-      return new ScannerDescriptor(columns, timestamp, startRow, stopRow,
-          filters);
-    } catch (JSONException e) {
-      throw new HBaseRestException("error parsing json string", e);
-    }
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.parser.IHBaseRestParser#getRowUpdateDescriptor
-   * (byte[], byte[][])
-   */
-  public RowUpdateDescriptor getRowUpdateDescriptor(byte[] input,
-      byte[][] pathSegments) throws HBaseRestException {
-
-    RowUpdateDescriptor rud = new RowUpdateDescriptor();
-    JSONArray a;
-
-    rud.setTableName(Bytes.toString(pathSegments[0]));
-    rud.setRowName(Bytes.toString(pathSegments[2]));
-
-    try {
-      JSONObject updateObject = new JSONObject(new String(input));
-      a = updateObject.getJSONArray(RESTConstants.COLUMNS);
-      for (int i = 0; i < a.length(); i++) {
-        rud.getColVals().put(
-            Bytes.toBytes(a.getJSONObject(i).getString(RESTConstants.NAME)),
-            org.apache.hadoop.hbase.util.Base64.decode(a.getJSONObject(i)
-                .getString(RESTConstants.VALUE)));
-      }
-    } catch (JSONException e) {
-      throw new HBaseRestException("Error parsing row update json", e);
-    }
-    return rud;
-  }
-
-}
Index: src/java/org/apache/hadoop/hbase/rest/parser/HBaseRestParserFactory.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/parser/HBaseRestParserFactory.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/rest/parser/HBaseRestParserFactory.java	(working copy)
@@ -24,9 +24,6 @@
 
 import org.apache.hadoop.hbase.rest.Dispatcher.ContentType;
 
-/**
- *
- */
 public class HBaseRestParserFactory {
 
   private static final Map<ContentType, Class<?>> parserMap =
@@ -34,7 +31,6 @@
 
   static {
     parserMap.put(ContentType.XML, XMLRestParser.class);
-    parserMap.put(ContentType.JSON, JsonRestParser.class);
   }
 
   public static IHBaseRestParser getParser(ContentType ct) {
Index: src/java/org/apache/hadoop/hbase/rest/exception/HBaseRestException.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/exception/HBaseRestException.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/rest/exception/HBaseRestException.java	(working copy)
@@ -19,8 +19,6 @@
  */
 package org.apache.hadoop.hbase.rest.exception;
 
-import agilejson.TOJSON;
-
 public class HBaseRestException extends Exception {
 
   /**
@@ -59,12 +57,10 @@
     innerMessage = message;
   }
 
-  @TOJSON
   public String getInnerClass() {
     return this.innerClass;
  }
 
-  @TOJSON
   public String getInnerMessage() {
     return this.innerMessage;
   }
Index: src/java/org/apache/hadoop/hbase/rest/descriptors/ScannerIdentifier.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/descriptors/ScannerIdentifier.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/rest/descriptors/ScannerIdentifier.java	(working copy)
@@ -23,8 +23,6 @@
 import org.apache.hadoop.hbase.rest.serializer.IRestSerializer;
 import org.apache.hadoop.hbase.rest.serializer.ISerializable;
 
-import agilejson.TOJSON;
-
 /**
  *
  */
@@ -53,7 +51,6 @@
   /**
    * @return the id
    */
-  @TOJSON
   public Integer getId() {
     return id;
   }
Index: src/java/org/apache/hadoop/hbase/rest/descriptors/RestCell.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/descriptors/RestCell.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/rest/descriptors/RestCell.java	(working copy)
@@ -21,8 +21,6 @@
 
 import org.apache.hadoop.hbase.io.Cell;
 
-import agilejson.TOJSON;
-
 /**
  *
  */
@@ -88,7 +86,6 @@
   /**
    * @return the name
    */
-  @TOJSON(base64=true)
   public byte[] getName() {
     return name;
   }
Index: src/java/org/apache/hadoop/hbase/rest/Status.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/Status.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/rest/Status.java	(working copy)
@@ -31,8 +31,6 @@
 import org.apache.hadoop.hbase.rest.serializer.ISerializable;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import agilejson.TOJSON;
-
 public class Status {
 
   @SuppressWarnings("unused")
@@ -108,7 +106,6 @@
     return statusCode;
   }
 
-  @TOJSON
   public Object getMessage() {
     return message;
   }
@@ -124,17 +121,14 @@
       reason = o;
     }
 
-    @TOJSON
     public int getStatusCode() {
       return statusCode;
     }
 
-    @TOJSON
     public boolean getError() {
       return error;
     }
 
-    @TOJSON
     public Object getMessage() {
       return reason;
     }
Index: src/java/org/apache/hadoop/hbase/rest/DatabaseModel.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/DatabaseModel.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/rest/DatabaseModel.java	(working copy)
@@ -30,8 +30,6 @@
 import org.apache.hadoop.hbase.rest.serializer.IRestSerializer;
 import org.apache.hadoop.hbase.rest.serializer.ISerializable;
 
-import agilejson.TOJSON;
-
 public class DatabaseModel extends AbstractModel {
 
   @SuppressWarnings("unused")
@@ -50,12 +48,10 @@
       tables = a.listTables();
     }
 
-    @TOJSON(prefixLength = 2)
     public boolean isMasterRunning() {
       return master_running;
     }
 
-    @TOJSON
    public HTableDescriptor[] getTables() {
      return tables;
    }
Index: src/java/org/apache/hadoop/hbase/rest/serializer/RestSerializerFactory.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/serializer/RestSerializerFactory.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/rest/serializer/RestSerializerFactory.java	(working copy)
@@ -44,9 +44,6 @@
     case XML:
       serializer = new SimpleXMLSerializer(response);
       break;
-    case JSON:
-      serializer = new JSONSerializer(response);
-      break;
     default:
       serializer = new SimpleXMLSerializer(response);
       break;
Index: src/java/org/apache/hadoop/hbase/rest/serializer/JSONSerializer.java
===================================================================
--- src/java/org/apache/hadoop/hbase/rest/serializer/JSONSerializer.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/rest/serializer/JSONSerializer.java	(working copy)
@@ -1,213 +0,0 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.rest.serializer;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.io.Cell;
-import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.rest.DatabaseModel.DatabaseMetadata;
-import org.apache.hadoop.hbase.rest.Status.StatusMessage;
-import org.apache.hadoop.hbase.rest.TableModel.Regions;
-import org.apache.hadoop.hbase.rest.descriptors.ScannerIdentifier;
-import org.apache.hadoop.hbase.rest.descriptors.TimestampsDescriptor;
-import org.apache.hadoop.hbase.rest.exception.HBaseRestException;
-
-import agilejson.JSON;
-
-/**
- *
- * Serializes objects into JSON strings and prints them back out on the output
- * stream. It should be noted that this JSON implementation uses annotations on
- * the objects to be serialized.
- *
- * Since these annotations are used to describe the serialization of the objects
- * the only method that is implemented is writeOutput(Object o). The other
- * methods in the interface do not need to be implemented.
- */
-public class JSONSerializer extends AbstractRestSerializer {
-
-  /**
-   * @param response
-   */
-  public JSONSerializer(HttpServletResponse response) {
-    super(response, false);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.serializer.IRestSerializer#writeOutput(java
-   * .lang.Object, javax.servlet.http.HttpServletResponse)
-   */
-  public void writeOutput(Object o) throws HBaseRestException {
-    response.setContentType("application/json");
-
-    try {
-      // LOG.debug("At top of send data");
-      String data = JSON.toJSON(o);
-      response.setContentLength(data.length());
-      response.getWriter().println(data);
-    } catch (Exception e) {
-      // LOG.debug("Error sending data: " + e.toString());
-      throw new HBaseRestException(e);
-    }
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @seeorg.apache.hadoop.hbase.rest.serializer.IRestSerializer#
-   * serializeColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor)
-   */
-  public void serializeColumnDescriptor(HColumnDescriptor column)
-      throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @seeorg.apache.hadoop.hbase.rest.serializer.IRestSerializer#
-   * serializeDatabaseMetadata
-   * (org.apache.hadoop.hbase.rest.DatabaseModel.DatabaseMetadata)
-   */
-  public void serializeDatabaseMetadata(DatabaseMetadata databaseMetadata)
-      throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.serializer.IRestSerializer#serializeRegionData
-   * (org.apache.hadoop.hbase.rest.TableModel.Regions)
-   */
-  public void serializeRegionData(Regions regions) throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @seeorg.apache.hadoop.hbase.rest.serializer.IRestSerializer#
-   * serializeTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)
-   */
-  public void serializeTableDescriptor(HTableDescriptor tableDescriptor)
-      throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.serializer.IRestSerializer#serializeStatusMessage
-   * (org.apache.hadoop.hbase.rest.Status.StatusMessage)
-   */
-  public void serializeStatusMessage(StatusMessage message)
-      throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @seeorg.apache.hadoop.hbase.rest.serializer.IRestSerializer#
-   * serializeScannerIdentifier(org.apache.hadoop.hbase.rest.ScannerIdentifier)
-   */
-  public void serializeScannerIdentifier(ScannerIdentifier scannerIdentifier)
-      throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.serializer.IRestSerializer#serializeRowResult
-   * (org.apache.hadoop.hbase.io.RowResult)
-   */
-  public void serializeRowResult(RowResult rowResult) throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.serializer.IRestSerializer#serializeRowResultArray
-   * (org.apache.hadoop.hbase.io.RowResult[])
-   */
-  public void serializeRowResultArray(RowResult[] rows)
-      throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.serializer.IRestSerializer#serializeCell(org
-   * .apache.hadoop.hbase.io.Cell)
-   */
-  public void serializeCell(Cell cell) throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.serializer.IRestSerializer#serializeCellArray
-   * (org.apache.hadoop.hbase.io.Cell[])
-   */
-  public void serializeCellArray(Cell[] cells) throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see
-   * org.apache.hadoop.hbase.rest.serializer.IRestSerializer#serializeTimestamps
-   * (org.apache.hadoop.hbase.rest.RowModel.TimestampsDescriptor)
-   */
-  public void serializeTimestamps(TimestampsDescriptor timestampsDescriptor)
-      throws HBaseRestException {
-    // No implementation needed for the JSON serializer
-  }
-
-}
Index: src/java/org/apache/hadoop/hbase/HTableDescriptor.java
===================================================================
--- src/java/org/apache/hadoop/hbase/HTableDescriptor.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/HTableDescriptor.java	(working copy)
@@ -39,8 +39,6 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.WritableComparable;
 
-import agilejson.TOJSON;
-
 /**
  * HTableDescriptor contains the name of an HTable, and its
  * column families.
@@ -373,7 +371,6 @@
   }
 
   /** @return name of table */
-  @TOJSON
   public byte [] getName() {
     return name;
   }
@@ -611,8 +608,7 @@
   public Set<byte[]> getFamiliesKeys() {
     return Collections.unmodifiableSet(this.families.keySet());
   }
-
-  @TOJSON(fieldName = "columns")
+
   public HColumnDescriptor[] getColumnFamilies() {
     return getFamilies().toArray(new HColumnDescriptor[0]);
   }
Index: src/java/org/apache/hadoop/hbase/io/Cell.java
===================================================================
--- src/java/org/apache/hadoop/hbase/io/Cell.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/io/Cell.java	(working copy)
@@ -39,8 +39,6 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
 
-import agilejson.TOJSON;
-
 /**
  * Cell - Used to transport a cell value (byte[]) and the timestamp it was
  * stored with together as a result for get and getRow methods. This promotes
@@ -119,13 +117,11 @@
   }
 
   /** @return the current cell's value */
-  @TOJSON(base64=true)
   public byte[] getValue() {
     return valueMap.get(valueMap.firstKey());
   }
 
   /** @return the current cell's timestamp */
-  @TOJSON
   public long getTimestamp() {
     return valueMap.firstKey();
   }
Index: src/java/org/apache/hadoop/hbase/io/RowResult.java
===================================================================
--- src/java/org/apache/hadoop/hbase/io/RowResult.java	(revision 932666)
+++ src/java/org/apache/hadoop/hbase/io/RowResult.java	(working copy)
@@ -41,8 +41,6 @@
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.Writable;
 
-import agilejson.TOJSON;
-
 /**
  * Holds row name and then a map of columns to cells.
 * @deprecated As of hbase 0.20.0, replaced by new Get/Put/Delete/Result-based API.
@@ -73,7 +71,6 @@
    * Get the row for this RowResult
    * @return the row
    */
-  @TOJSON(base64=true)
   public byte [] getRow() {
     return row;
   }
@@ -145,7 +142,6 @@
    *
    * @return Cells
    */
-  @TOJSON
   public RestCell[] getCells() {
     RestCell[] restCells = new RestCell[this.cells.size()];
     int i = 0;
Index: src/webapps/rest/WEB-INF/web.xml
===================================================================
--- src/webapps/rest/WEB-INF/web.xml	(revision 932666)
+++ src/webapps/rest/WEB-INF/web.xml	(working copy)
@@ -1,8 +1,8 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <web-app>
-  <display-name>jsonrest</display-name>
+  <display-name>rest</display-name>
   <servlet>
-    <description>Hbase JSONREST Interface</description>
+    <description>HBase REST Interface</description>
     <display-name>api</display-name>
     <servlet-name>api</servlet-name>
     <servlet-class>org.apache.hadoop.hbase.rest.Dispatcher</servlet-class>
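
Reviewer note (not part of the patch): for anyone unfamiliar with the dependency being removed, AgileJSON drives serialization entirely off the @TOJSON getter annotations this patch strips, via the same agilejson.JSON.toJSON() call the deleted JSONSerializer.writeOutput() made. Below is a minimal stand-alone sketch of that pattern; the ExampleBean class is hypothetical, and the semantics of the annotation attributes (fieldName, base64, prefixLength) are inferred from how the deleted code used them, not from AgileJSON documentation.

    // Hypothetical illustration only -- requires the removed
    // lib/AgileJSON-2009-03-30.jar on the classpath.
    import agilejson.JSON;
    import agilejson.TOJSON;

    public class ExampleBean {

      // Emitted under the key "name", base64-encoded -- the same form used
      // on Cell.getValue() and RowResult.getRow() before this patch.
      @TOJSON(fieldName = "name", base64 = true)
      public byte[] getName() {
        return new byte[] { 'f', ':' };
      }

      // prefixLength = 2 apparently strips the two-character "is" prefix
      // from the getter name (cf. HColumnDescriptor.isInMemory()).
      @TOJSON(prefixLength = 2)
      public boolean isInMemory() {
        return true;
      }

      // A bare @TOJSON derives the JSON key from the getter name.
      @TOJSON
      public int getMaxVersions() {
        return 3;
      }

      public static void main(String[] args) throws Exception {
        // Same call the deleted JSONSerializer.writeOutput() made:
        System.out.println(JSON.toJSON(new ExampleBean()));
      }
    }

After this patch, a request for ContentType.JSON falls through to the default arm of the switch in RestSerializerFactory and is answered by SimpleXMLSerializer instead.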