diff --git bin/hbase bin/hbase index 4eeb145..02944f9 100755 --- bin/hbase +++ bin/hbase @@ -86,8 +86,7 @@ if [ $# = 0 ]; then echo " zookeeper run a Zookeeper server" echo " rest run an HBase REST server" echo " thrift run the HBase Thrift server" - echo " thrift2 run the HBase Thrift2 server" - echo " avro run an HBase Avro server" + echo " thrift2 run the HBase Thrift2 server" echo "" echo "PACKAGE MANAGEMENT" echo " classpath dump hbase CLASSPATH" @@ -313,11 +312,6 @@ elif [ "$COMMAND" = "rest" ] ; then if [ "$1" != "stop" ] ; then HBASE_OPTS="$HBASE_OPTS $HBASE_REST_OPTS" fi -elif [ "$COMMAND" = "avro" ] ; then - CLASS='org.apache.hadoop.hbase.avro.AvroServer' - if [ "$1" != "stop" ] ; then - HBASE_OPTS="$HBASE_OPTS $HBASE_AVRO_OPTS" - fi elif [ "$COMMAND" = "zookeeper" ] ; then CLASS='org.apache.hadoop.hbase.zookeeper.HQuorumPeer' if [ "$1" != "stop" ] ; then diff --git hbase-server/pom.xml hbase-server/pom.xml index 54aa41b..768c25a 100644 --- hbase-server/pom.xml +++ hbase-server/pom.xml @@ -57,24 +57,6 @@ - - org.apache.avro - avro-maven-plugin - ${avro.version} - - - generate-avro-sources - generate-sources - - schema - protocol - - - - - ${project.build.directory}/generated-sources/java - - @@ -252,20 +234,6 @@ - - - org.apache.avro - avro-maven-plugin - [1.5.3,) - - schema - protocol - - - - - - @@ -351,26 +319,6 @@ log4j - org.apache.avro - avro - ${avro.version} - - - com.thoughtworks.paranamer - paranamer - - - com.thoughtworks.paranamer - paranamer-ant - - - - - org.apache.avro - avro-ipc - ${avro.version} - - org.apache.zookeeper zookeeper diff --git hbase-server/src/main/avro/hbase.avpr hbase-server/src/main/avro/hbase.avpr deleted file mode 100644 index 61208fd..0000000 --- hbase-server/src/main/avro/hbase.avpr +++ /dev/null @@ -1,609 +0,0 @@ -{ - "protocol" : "HBase", - "namespace" : "org.apache.hadoop.hbase.avro.generated", - "types" : [ { - "type" : "record", - "name" : "AServerAddress", - "fields" : [ { - "name" : "hostname", - "type" : "string" - }, { - "name" : "inetSocketAddress", - "type" : "string" - }, { - "name" : "port", - "type" : "int" - } ] - }, { - "type" : "record", - "name" : "ARegionLoad", - "fields" : [ { - "name" : "memStoreSizeMB", - "type" : "int" - }, { - "name" : "name", - "type" : "bytes" - }, { - "name" : "storefileIndexSizeMB", - "type" : "int" - }, { - "name" : "storefiles", - "type" : "int" - }, { - "name" : "storefileSizeMB", - "type" : "int" - }, { - "name" : "stores", - "type" : "int" - } ] - }, { - "type" : "record", - "name" : "AServerLoad", - "fields" : [ { - "name" : "load", - "type" : "int" - }, { - "name" : "maxHeapMB", - "type" : "int" - }, { - "name" : "memStoreSizeInMB", - "type" : "int" - }, { - "name" : "numberOfRegions", - "type" : "int" - }, { - "name" : "numberOfRequests", - "type" : "int" - }, { - "name" : "regionsLoad", - "type" : { - "type" : "array", - "items" : "ARegionLoad" - } - }, { - "name" : "storefileIndexSizeInMB", - "type" : "int" - }, { - "name" : "storefiles", - "type" : "int" - }, { - "name" : "storefileSizeInMB", - "type" : "int" - }, { - "name" : "usedHeapMB", - "type" : "int" - } ] - }, { - "type" : "record", - "name" : "AServerInfo", - "fields" : [ { - "name" : "infoPort", - "type" : "int" - }, { - "name" : "load", - "type" : "AServerLoad" - }, { - "name" : "serverAddress", - "type" : "AServerAddress" - }, { - "name" : "serverName", - "type" : "string" - }, { - "name" : "startCode", - "type" : "long" - } ] - }, { - "type" : "record", - "name" : "AClusterStatus", - "fields" : [ { - "name" : "averageLoad", 
- "type" : "double" - }, { - "name" : "deadServerNames", - "type" : { - "type" : "array", - "items" : "string" - } - }, { - "name" : "deadServers", - "type" : "int" - }, { - "name" : "hbaseVersion", - "type" : "string" - }, { - "name" : "regionsCount", - "type" : "int" - }, { - "name" : "requestsCount", - "type" : "int" - }, { - "name" : "serverInfos", - "type" : { - "type" : "array", - "items" : "AServerInfo" - } - }, { - "name" : "servers", - "type" : "int" - } ] - }, { - "type" : "enum", - "name" : "ACompressionAlgorithm", - "symbols" : [ "LZO", "GZ", "NONE" ] - }, { - "type" : "record", - "name" : "AFamilyDescriptor", - "fields" : [ { - "name" : "name", - "type" : "bytes" - }, { - "name" : "compression", - "type" : [ "ACompressionAlgorithm", "null" ] - }, { - "name" : "maxVersions", - "type" : [ "int", "null" ] - }, { - "name" : "blocksize", - "type" : [ "int", "null" ] - }, { - "name" : "inMemory", - "type" : [ "boolean", "null" ] - }, { - "name" : "timeToLive", - "type" : [ "int", "null" ] - }, { - "name" : "blockCacheEnabled", - "type" : [ "boolean", "null" ] - } ] - }, { - "type" : "record", - "name" : "ATableDescriptor", - "fields" : [ { - "name" : "name", - "type" : "bytes" - }, { - "name" : "families", - "type" : [ { - "type" : "array", - "items" : "AFamilyDescriptor" - }, "null" ] - }, { - "name" : "maxFileSize", - "type" : [ "long", "null" ] - }, { - "name" : "memStoreFlushSize", - "type" : [ "long", "null" ] - }, { - "name" : "rootRegion", - "type" : [ "boolean", "null" ] - }, { - "name" : "metaRegion", - "type" : [ "boolean", "null" ] - }, { - "name" : "metaTable", - "type" : [ "boolean", "null" ] - }, { - "name" : "readOnly", - "type" : [ "boolean", "null" ] - }, { - "name" : "deferredLogFlush", - "type" : [ "boolean", "null" ] - } ] - }, { - "type" : "record", - "name" : "AColumn", - "fields" : [ { - "name" : "family", - "type" : "bytes" - }, { - "name" : "qualifier", - "type" : [ "bytes", "null" ] - } ] - }, { - "type" : "record", - "name" : "ATimeRange", - "fields" : [ { - "name" : "minStamp", - "type" : "long" - }, { - "name" : "maxStamp", - "type" : "long" - } ] - }, { - "type" : "record", - "name" : "AGet", - "fields" : [ { - "name" : "row", - "type" : "bytes" - }, { - "name" : "columns", - "type" : [ { - "type" : "array", - "items" : "AColumn" - }, "null" ] - }, { - "name" : "timestamp", - "type" : [ "long", "null" ] - }, { - "name" : "timerange", - "type" : [ "ATimeRange", "null" ] - }, { - "name" : "maxVersions", - "type" : [ "int", "null" ] - } ] - }, { - "type" : "record", - "name" : "AResultEntry", - "fields" : [ { - "name" : "family", - "type" : "bytes" - }, { - "name" : "qualifier", - "type" : "bytes" - }, { - "name" : "value", - "type" : "bytes" - }, { - "name" : "timestamp", - "type" : "long" - } ] - }, { - "type" : "record", - "name" : "AResult", - "fields" : [ { - "name" : "row", - "type" : "bytes" - }, { - "name" : "entries", - "type" : { - "type" : "array", - "items" : "AResultEntry" - } - } ] - }, { - "type" : "record", - "name" : "AColumnValue", - "fields" : [ { - "name" : "family", - "type" : "bytes" - }, { - "name" : "qualifier", - "type" : "bytes" - }, { - "name" : "value", - "type" : "bytes" - }, { - "name" : "timestamp", - "type" : [ "long", "null" ] - } ] - }, { - "type" : "record", - "name" : "APut", - "fields" : [ { - "name" : "row", - "type" : "bytes" - }, { - "name" : "columnValues", - "type" : { - "type" : "array", - "items" : "AColumnValue" - } - } ] - }, { - "type" : "record", - "name" : "ADelete", - "fields" : [ { - "name" : "row", - 
"type" : "bytes" - }, { - "name" : "columns", - "type" : [ { - "type" : "array", - "items" : "AColumn" - }, "null" ] - } ] - }, { - "type" : "record", - "name" : "AScan", - "fields" : [ { - "name" : "startRow", - "type" : [ "bytes", "null" ] - }, { - "name" : "stopRow", - "type" : [ "bytes", "null" ] - }, { - "name" : "columns", - "type" : [ { - "type" : "array", - "items" : "AColumn" - }, "null" ] - }, { - "name" : "timestamp", - "type" : [ "long", "null" ] - }, { - "name" : "timerange", - "type" : [ "ATimeRange", "null" ] - }, { - "name" : "maxVersions", - "type" : [ "int", "null" ] - } ] - }, { - "type" : "error", - "name" : "AIOError", - "fields" : [ { - "name" : "message", - "type" : "string" - } ] - }, { - "type" : "error", - "name" : "AIllegalArgument", - "fields" : [ { - "name" : "message", - "type" : "string" - } ] - }, { - "type" : "error", - "name" : "ATableExists", - "fields" : [ { - "name" : "message", - "type" : "string" - } ] - }, { - "type" : "error", - "name" : "AMasterNotRunning", - "fields" : [ { - "name" : "message", - "type" : "string" - } ] - } ], - "messages" : { - "getHBaseVersion" : { - "request" : [ ], - "response" : "string", - "errors" : [ "AIOError" ] - }, - "getClusterStatus" : { - "request" : [ ], - "response" : "AClusterStatus", - "errors" : [ "AIOError" ] - }, - "listTables" : { - "request" : [ ], - "response" : { - "type" : "array", - "items" : "ATableDescriptor" - }, - "errors" : [ "AIOError" ] - }, - "describeTable" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - } ], - "response" : "ATableDescriptor", - "errors" : [ "AIOError" ] - }, - "isTableEnabled" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - } ], - "response" : "boolean", - "errors" : [ "AIOError" ] - }, - "tableExists" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - } ], - "response" : "boolean", - "errors" : [ "AIOError" ] - }, - "describeFamily" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "family", - "type" : "bytes" - } ], - "response" : "AFamilyDescriptor", - "errors" : [ "AIOError" ] - }, - "createTable" : { - "request" : [ { - "name" : "table", - "type" : "ATableDescriptor" - } ], - "response" : "null", - "errors" : [ "AIOError", "AIllegalArgument", "ATableExists", "AMasterNotRunning" ] - }, - "deleteTable" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - } ], - "response" : "null", - "errors" : [ "AIOError" ] - }, - "modifyTable" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "tableDescriptor", - "type" : "ATableDescriptor" - } ], - "response" : "null", - "errors" : [ "AIOError" ] - }, - "enableTable" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - } ], - "response" : "null", - "errors" : [ "AIOError" ] - }, - "disableTable" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - } ], - "response" : "null", - "errors" : [ "AIOError" ] - }, - "flush" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - } ], - "response" : "null", - "errors" : [ "AIOError" ] - }, - "split" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - } ], - "response" : "null", - "errors" : [ "AIOError" ] - }, - "addFamily" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "family", - "type" : "AFamilyDescriptor" - } ], - "response" : "null", - "errors" : [ "AIOError" ] - }, - "deleteFamily" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "family", - "type" : "bytes" - } ], - "response" : 
"null", - "errors" : [ "AIOError" ] - }, - "modifyFamily" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "familyName", - "type" : "bytes" - }, { - "name" : "familyDescriptor", - "type" : "AFamilyDescriptor" - } ], - "response" : "null", - "errors" : [ "AIOError" ] - }, - "get" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "get", - "type" : "AGet" - } ], - "response" : "AResult", - "errors" : [ "AIOError" ] - }, - "exists" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "get", - "type" : "AGet" - } ], - "response" : "boolean", - "errors" : [ "AIOError" ] - }, - "put" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "put", - "type" : "APut" - } ], - "response" : "null", - "errors" : [ "AIOError" ] - }, - "delete" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "delete", - "type" : "ADelete" - } ], - "response" : "null", - "errors" : [ "AIOError" ] - }, - "incrementColumnValue" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "row", - "type" : "bytes" - }, { - "name" : "family", - "type" : "bytes" - }, { - "name" : "qualifier", - "type" : "bytes" - }, { - "name" : "amount", - "type" : "long" - }, { - "name" : "writeToWAL", - "type" : "boolean" - } ], - "response" : "long", - "errors" : [ "AIOError" ] - }, - "scannerOpen" : { - "request" : [ { - "name" : "table", - "type" : "bytes" - }, { - "name" : "scan", - "type" : "AScan" - } ], - "response" : "int", - "errors" : [ "AIOError" ] - }, - "scannerClose" : { - "request" : [ { - "name" : "scannerId", - "type" : "int" - } ], - "response" : "null", - "errors" : [ "AIOError", "AIllegalArgument" ] - }, - "scannerGetRows" : { - "request" : [ { - "name" : "scannerId", - "type" : "int" - }, { - "name" : "numberOfRows", - "type" : "int" - } ], - "response" : { - "type" : "array", - "items" : "AResult" - }, - "errors" : [ "AIOError", "AIllegalArgument" ] - } - } -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/avro/AvroServer.java hbase-server/src/main/java/org/apache/hadoop/hbase/avro/AvroServer.java deleted file mode 100644 index d96867d..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/avro/AvroServer.java +++ /dev/null @@ -1,618 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hbase.avro; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.HashMap; - -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericArray; -import org.apache.avro.generic.GenericData; -import org.apache.avro.ipc.HttpServer; -import org.apache.avro.ipc.specific.SpecificResponder; -import org.apache.avro.util.Utf8; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.MasterNotRunningException; -import org.apache.hadoop.hbase.TableExistsException; -import org.apache.hadoop.hbase.TableNotFoundException; -import org.apache.hadoop.hbase.avro.generated.AClusterStatus; -import org.apache.hadoop.hbase.avro.generated.ADelete; -import org.apache.hadoop.hbase.avro.generated.AFamilyDescriptor; -import org.apache.hadoop.hbase.avro.generated.AGet; -import org.apache.hadoop.hbase.avro.generated.AIOError; -import org.apache.hadoop.hbase.avro.generated.AIllegalArgument; -import org.apache.hadoop.hbase.avro.generated.AMasterNotRunning; -import org.apache.hadoop.hbase.avro.generated.APut; -import org.apache.hadoop.hbase.avro.generated.AResult; -import org.apache.hadoop.hbase.avro.generated.AScan; -import org.apache.hadoop.hbase.avro.generated.ATableDescriptor; -import org.apache.hadoop.hbase.avro.generated.ATableExists; -import org.apache.hadoop.hbase.avro.generated.HBase; -import org.apache.hadoop.hbase.client.HBaseAdmin; -import org.apache.hadoop.hbase.client.HTableInterface; -import org.apache.hadoop.hbase.client.HTablePool; -import org.apache.hadoop.hbase.client.ResultScanner; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.util.Bytes; - -/** - * Start an Avro server - */ -@Deprecated -@InterfaceAudience.Private -public class AvroServer { - - /** - * The HBaseImpl is a glue object that connects Avro RPC calls to the - * HBase client API primarily defined in the HBaseAdmin and HTable objects. - */ - public static class HBaseImpl implements HBase { - // - // PROPERTIES - // - protected Configuration conf = null; - protected HBaseAdmin admin = null; - protected HTablePool htablePool = null; - protected final Log LOG = LogFactory.getLog(this.getClass().getName()); - - // nextScannerId and scannerMap are used to manage scanner state - protected int nextScannerId = 0; - protected HashMap scannerMap = null; - - // - // UTILITY METHODS - // - - /** - * Assigns a unique ID to the scanner and adds the mapping to an internal - * hash-map. - * - * @param scanner - * @return integer scanner id - */ - protected synchronized int addScanner(ResultScanner scanner) { - int id = nextScannerId++; - scannerMap.put(id, scanner); - return id; - } - - /** - * Returns the scanner associated with the specified ID. - * - * @param id - * @return a Scanner, or null if ID was invalid. - */ - protected synchronized ResultScanner getScanner(int id) { - return scannerMap.get(id); - } - - /** - * Removes the scanner associated with the specified ID from the internal - * id->scanner hash-map. - * - * @param id - * @return a Scanner, or null if ID was invalid. 
- */ - protected synchronized ResultScanner removeScanner(int id) { - return scannerMap.remove(id); - } - - // - // CTOR METHODS - // - - // TODO(hammer): figure out appropriate setting of maxSize for htablePool - /** - * Constructs an HBaseImpl object. - * @throws IOException - */ - HBaseImpl() throws IOException { - this(HBaseConfiguration.create()); - } - - HBaseImpl(final Configuration c) throws IOException { - conf = c; - admin = new HBaseAdmin(conf); - htablePool = new HTablePool(conf, 10); - scannerMap = new HashMap(); - } - - // - // SERVICE METHODS - // - - // TODO(hammer): Investigate use of the Command design pattern - - // - // Cluster metadata - // - - public Utf8 getHBaseVersion() throws AIOError { - try { - return new Utf8(admin.getClusterStatus().getHBaseVersion()); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - public AClusterStatus getClusterStatus() throws AIOError { - try { - return AvroUtil.csToACS(admin.getClusterStatus()); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - public GenericArray listTables() throws AIOError { - try { - HTableDescriptor[] tables = admin.listTables(); - Schema atdSchema = Schema.createArray(ATableDescriptor.SCHEMA$); - GenericData.Array result = null; - result = new GenericData.Array(tables.length, atdSchema); - for (HTableDescriptor table : tables) { - result.add(AvroUtil.htdToATD(table)); - } - return result; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - // - // Table metadata - // - - // TODO(hammer): Handle the case where the table does not exist explicitly? - public ATableDescriptor describeTable(ByteBuffer table) throws AIOError { - try { - return AvroUtil.htdToATD(admin.getTableDescriptor(Bytes.toBytes(table))); - } catch (TableNotFoundException e) { - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - public boolean isTableEnabled(ByteBuffer table) throws AIOError { - try { - return admin.isTableEnabled(Bytes.toBytes(table)); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - public boolean tableExists(ByteBuffer table) throws AIOError { - try { - return admin.tableExists(Bytes.toBytes(table)); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - // - // Family metadata - // - - // TODO(hammer): Handle the case where the family does not exist explicitly? 
- public AFamilyDescriptor describeFamily(ByteBuffer table, ByteBuffer family) throws AIOError { - try { - HTableDescriptor htd = admin.getTableDescriptor(Bytes.toBytes(table)); - return AvroUtil.hcdToAFD(htd.getFamily(Bytes.toBytes(family))); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - // - // Table admin - // - - public Void createTable(ATableDescriptor table) throws AIOError, - AIllegalArgument, - ATableExists, - AMasterNotRunning { - try { - admin.createTable(AvroUtil.atdToHTD(table)); - return null; - } catch (IllegalArgumentException e) { - AIllegalArgument iae = new AIllegalArgument(); - iae.message = new Utf8(e.getMessage()); - throw iae; - } catch (TableExistsException e) { - ATableExists tee = new ATableExists(); - tee.message = new Utf8(e.getMessage()); - throw tee; - } catch (MasterNotRunningException e) { - AMasterNotRunning mnre = new AMasterNotRunning(); - mnre.message = new Utf8(e.getMessage()); - throw mnre; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - // Note that disable, flush and major compaction of .META. needed in client - // TODO(hammer): more selective cache dirtying than flush? - public Void deleteTable(ByteBuffer table) throws AIOError { - try { - admin.deleteTable(Bytes.toBytes(table)); - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - // NB: Asynchronous operation - public Void modifyTable(ByteBuffer tableName, ATableDescriptor tableDescriptor) throws AIOError { - try { - admin.modifyTable(Bytes.toBytes(tableName), - AvroUtil.atdToHTD(tableDescriptor)); - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - public Void enableTable(ByteBuffer table) throws AIOError { - try { - admin.enableTable(Bytes.toBytes(table)); - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - public Void disableTable(ByteBuffer table) throws AIOError { - try { - admin.disableTable(Bytes.toBytes(table)); - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - // NB: Asynchronous operation - public Void flush(ByteBuffer table) throws AIOError { - try { - admin.flush(Bytes.toBytes(table)); - return null; - } catch (InterruptedException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - // NB: Asynchronous operation - public Void split(ByteBuffer table) throws AIOError { - try { - admin.split(Bytes.toBytes(table)); - return null; - } catch (InterruptedException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - // - // Family admin - // - - public Void addFamily(ByteBuffer table, AFamilyDescriptor family) throws AIOError { - try { - admin.addColumn(Bytes.toBytes(table), - AvroUtil.afdToHCD(family)); - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - 
// NB: Asynchronous operation - public Void deleteFamily(ByteBuffer table, ByteBuffer family) throws AIOError { - try { - admin.deleteColumn(Bytes.toBytes(table), Bytes.toBytes(family)); - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - // NB: Asynchronous operation - public Void modifyFamily(ByteBuffer table, ByteBuffer familyName, AFamilyDescriptor familyDescriptor) throws AIOError { - try { - admin.modifyColumn(Bytes.toBytes(table), AvroUtil.afdToHCD(familyDescriptor)); - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - // - // Single-row DML - // - - // TODO(hammer): Java with statement for htablepool concision? - // TODO(hammer): Can Get have timestamp and timerange simultaneously? - // TODO(hammer): Do I need to catch the RuntimeException of getTable? - // TODO(hammer): Handle gets with no results - // TODO(hammer): Uses exists(Get) to ensure columns exist - public AResult get(ByteBuffer table, AGet aget) throws AIOError { - HTableInterface htable = htablePool.getTable(Bytes.toBytes(table)); - try { - return AvroUtil.resultToAResult(htable.get(AvroUtil.agetToGet(aget))); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } finally { - try { - htablePool.putTable(htable); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - } - - public boolean exists(ByteBuffer table, AGet aget) throws AIOError { - HTableInterface htable = htablePool.getTable(Bytes.toBytes(table)); - try { - return htable.exists(AvroUtil.agetToGet(aget)); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } finally { - try { - htablePool.putTable(htable); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - } - - public Void put(ByteBuffer table, APut aput) throws AIOError { - HTableInterface htable = htablePool.getTable(Bytes.toBytes(table)); - try { - htable.put(AvroUtil.aputToPut(aput)); - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } finally { - try { - htablePool.putTable(htable); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - } - - public Void delete(ByteBuffer table, ADelete adelete) throws AIOError { - HTableInterface htable = htablePool.getTable(Bytes.toBytes(table)); - try { - htable.delete(AvroUtil.adeleteToDelete(adelete)); - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } finally { - try { - htablePool.putTable(htable); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - } - - public long incrementColumnValue(ByteBuffer table, ByteBuffer row, ByteBuffer family, ByteBuffer qualifier, long amount, boolean writeToWAL) throws AIOError { - HTableInterface htable = htablePool.getTable(Bytes.toBytes(table)); - try { - return htable.incrementColumnValue(Bytes.toBytes(row), Bytes.toBytes(family), Bytes.toBytes(qualifier), amount, writeToWAL); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new 
Utf8(e.getMessage()); - throw ioe; - } finally { - try { - htablePool.putTable(htable); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - } - - // - // Multi-row DML - // - - public int scannerOpen(ByteBuffer table, AScan ascan) throws AIOError { - HTableInterface htable = htablePool.getTable(Bytes.toBytes(table)); - try { - Scan scan = AvroUtil.ascanToScan(ascan); - return addScanner(htable.getScanner(scan)); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } finally { - try { - htablePool.putTable(htable); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - } - - public Void scannerClose(int scannerId) throws AIOError, AIllegalArgument { - try { - ResultScanner scanner = getScanner(scannerId); - if (scanner == null) { - AIllegalArgument aie = new AIllegalArgument(); - aie.message = new Utf8("scanner ID is invalid: " + scannerId); - throw aie; - } - scanner.close(); - removeScanner(scannerId); - return null; - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - - public GenericArray scannerGetRows(int scannerId, int numberOfRows) throws AIOError, AIllegalArgument { - try { - ResultScanner scanner = getScanner(scannerId); - if (scanner == null) { - AIllegalArgument aie = new AIllegalArgument(); - aie.message = new Utf8("scanner ID is invalid: " + scannerId); - throw aie; - } - return AvroUtil.resultsToAResults(scanner.next(numberOfRows)); - } catch (IOException e) { - AIOError ioe = new AIOError(); - ioe.message = new Utf8(e.getMessage()); - throw ioe; - } - } - } - - // - // MAIN PROGRAM - // - - private static void printUsageAndExit() { - printUsageAndExit(null); - } - - private static void printUsageAndExit(final String message) { - if (message != null) { - System.err.println(message); - } - System.out.println("Usage: java org.apache.hadoop.hbase.avro.AvroServer " + - "--help | [--port=PORT] start"); - System.out.println("Arguments:"); - System.out.println(" start Start Avro server"); - System.out.println(" stop Stop Avro server"); - System.out.println("Options:"); - System.out.println(" port Port to listen on. Default: 9090"); - System.out.println(" help Print this message and exit"); - System.exit(0); - } - - protected static void doMain(final String[] args) throws Exception { - if (args.length < 1) { - printUsageAndExit(); - } - int port = 9090; - final String portArgKey = "--port="; - for (String cmd: args) { - if (cmd.startsWith(portArgKey)) { - port = Integer.parseInt(cmd.substring(portArgKey.length())); - continue; - } else if (cmd.equals("--help") || cmd.equals("-h")) { - printUsageAndExit(); - } else if (cmd.equals("start")) { - continue; - } else if (cmd.equals("stop")) { - printUsageAndExit("To shutdown the Avro server run " + - "bin/hbase-daemon.sh stop avro or send a kill signal to " + - "the Avro server pid"); - } - - // Print out usage if we get to here. 
- printUsageAndExit(); - } - Log LOG = LogFactory.getLog("AvroServer"); - LOG.info("starting HBase Avro server on port " + Integer.toString(port)); - SpecificResponder r = new SpecificResponder(HBase.class, new HBaseImpl()); - HttpServer server = new HttpServer(r, port); - server.start(); - server.join(); - } - - // TODO(hammer): Look at Cassandra's daemonization and integration with JSVC - // TODO(hammer): Don't eat it after a single exception - // TODO(hammer): Figure out why we do doMain() - // TODO(hammer): Figure out if we want String[] or String [] syntax - public static void main(String[] args) throws Exception { - doMain(args); - } -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/avro/AvroUtil.java hbase-server/src/main/java/org/apache/hadoop/hbase/avro/AvroUtil.java deleted file mode 100644 index 28aa627..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/avro/AvroUtil.java +++ /dev/null @@ -1,417 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hbase.avro; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Collection; -import java.util.List; - -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericArray; -import org.apache.avro.generic.GenericData; -import org.apache.avro.util.Utf8; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hbase.ClusterStatus; -import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.HServerAddress; -import org.apache.hadoop.hbase.ServerLoad; -import org.apache.hadoop.hbase.RegionLoad; -import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.ServerName; -import org.apache.hadoop.hbase.avro.generated.AClusterStatus; -import org.apache.hadoop.hbase.avro.generated.AColumn; -import org.apache.hadoop.hbase.avro.generated.AColumnValue; -import org.apache.hadoop.hbase.avro.generated.ACompressionAlgorithm; -import org.apache.hadoop.hbase.avro.generated.ADelete; -import org.apache.hadoop.hbase.avro.generated.AFamilyDescriptor; -import org.apache.hadoop.hbase.avro.generated.AGet; -import org.apache.hadoop.hbase.avro.generated.AIllegalArgument; -import org.apache.hadoop.hbase.avro.generated.APut; -import org.apache.hadoop.hbase.avro.generated.ARegionLoad; -import org.apache.hadoop.hbase.avro.generated.AResult; -import org.apache.hadoop.hbase.avro.generated.AResultEntry; -import org.apache.hadoop.hbase.avro.generated.AScan; -import org.apache.hadoop.hbase.avro.generated.AServerAddress; -import org.apache.hadoop.hbase.avro.generated.AServerInfo; -import org.apache.hadoop.hbase.avro.generated.AServerLoad; -import org.apache.hadoop.hbase.avro.generated.ATableDescriptor; -import 
org.apache.hadoop.hbase.client.Delete; -import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.Put; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.io.hfile.Compression; -import org.apache.hadoop.hbase.util.Bytes; - -@Deprecated -@InterfaceAudience.Private -public class AvroUtil { - - // - // Cluster metadata - // - - static public AServerAddress hsaToASA(HServerAddress hsa) throws IOException { - AServerAddress asa = new AServerAddress(); - asa.hostname = new Utf8(hsa.getHostname()); - asa.inetSocketAddress = new Utf8(hsa.getInetSocketAddress().toString()); - asa.port = hsa.getPort(); - return asa; - } - - static public ARegionLoad hrlToARL(RegionLoad rl) throws IOException { - ARegionLoad arl = new ARegionLoad(); - arl.memStoreSizeMB = rl.getMemStoreSizeMB(); - arl.name = ByteBuffer.wrap(rl.getName()); - arl.storefileIndexSizeMB = rl.getStorefileIndexSizeMB(); - arl.storefiles = rl.getStorefiles(); - arl.storefileSizeMB = rl.getStorefileSizeMB(); - arl.stores = rl.getStores(); - return arl; - } - - static public AServerLoad hslToASL(ServerLoad sl) throws IOException { - AServerLoad asl = new AServerLoad(); - asl.load = sl.getLoad(); - asl.maxHeapMB = sl.getMaxHeapMB(); - asl.memStoreSizeInMB = sl.getMemstoreSizeInMB(); - asl.numberOfRegions = sl.getNumberOfRegions(); - asl.numberOfRequests = sl.getNumberOfRequests(); - - Collection regionLoads = sl.getRegionsLoad().values(); - Schema s = Schema.createArray(ARegionLoad.SCHEMA$); - GenericData.Array aregionLoads = null; - if (regionLoads != null) { - aregionLoads = new GenericData.Array(regionLoads.size(), s); - for (RegionLoad rl : regionLoads) { - aregionLoads.add(hrlToARL(rl)); - } - } else { - aregionLoads = new GenericData.Array(0, s); - } - asl.regionsLoad = aregionLoads; - - asl.storefileIndexSizeInMB = sl.getStorefileIndexSizeInMB(); - asl.storefiles = sl.getStorefiles(); - asl.storefileSizeInMB = sl.getStorefileSizeInMB(); - asl.usedHeapMB = sl.getUsedHeapMB(); - return asl; - } - - static public AServerInfo hsiToASI(ServerName sn, ServerLoad sl) throws IOException { - AServerInfo asi = new AServerInfo(); - asi.infoPort = -1; - asi.load = hslToASL(sl); - asi.serverAddress = hsaToASA(new HServerAddress(sn.getHostname(), sn.getPort())); - asi.serverName = new Utf8(sn.toString()); - asi.startCode = sn.getStartcode(); - return asi; - } - - static public AClusterStatus csToACS(ClusterStatus cs) throws IOException { - AClusterStatus acs = new AClusterStatus(); - acs.averageLoad = cs.getAverageLoad(); - Collection deadServerNames = cs.getDeadServerNames(); - Schema stringArraySchema = Schema.createArray(Schema.create(Schema.Type.STRING)); - GenericData.Array adeadServerNames = null; - if (deadServerNames != null) { - adeadServerNames = new GenericData.Array(deadServerNames.size(), stringArraySchema); - for (ServerName deadServerName : deadServerNames) { - adeadServerNames.add(new Utf8(deadServerName.toString())); - } - } else { - adeadServerNames = new GenericData.Array(0, stringArraySchema); - } - acs.deadServerNames = adeadServerNames; - acs.deadServers = cs.getDeadServers(); - acs.hbaseVersion = new Utf8(cs.getHBaseVersion()); - acs.regionsCount = cs.getRegionsCount(); - acs.requestsCount = cs.getRequestsCount(); - Collection hserverInfos = cs.getServers(); - Schema s = Schema.createArray(AServerInfo.SCHEMA$); - GenericData.Array aserverInfos = null; - if (hserverInfos != null) { - aserverInfos = new 
GenericData.Array(hserverInfos.size(), s); - for (ServerName hsi : hserverInfos) { - aserverInfos.add(hsiToASI(hsi, cs.getLoad(hsi))); - } - } else { - aserverInfos = new GenericData.Array(0, s); - } - acs.serverInfos = aserverInfos; - acs.servers = cs.getServers().size(); - return acs; - } - - // - // Table metadata - // - - static public ATableDescriptor htdToATD(HTableDescriptor table) throws IOException { - ATableDescriptor atd = new ATableDescriptor(); - atd.name = ByteBuffer.wrap(table.getName()); - Collection families = table.getFamilies(); - Schema afdSchema = Schema.createArray(AFamilyDescriptor.SCHEMA$); - GenericData.Array afamilies = null; - if (families.size() > 0) { - afamilies = new GenericData.Array(families.size(), afdSchema); - for (HColumnDescriptor hcd : families) { - AFamilyDescriptor afamily = hcdToAFD(hcd); - afamilies.add(afamily); - } - } else { - afamilies = new GenericData.Array(0, afdSchema); - } - atd.families = afamilies; - atd.maxFileSize = table.getMaxFileSize(); - atd.memStoreFlushSize = table.getMemStoreFlushSize(); - atd.rootRegion = table.isRootRegion(); - atd.metaRegion = table.isMetaRegion(); - atd.metaTable = table.isMetaTable(); - atd.readOnly = table.isReadOnly(); - atd.deferredLogFlush = table.isDeferredLogFlush(); - return atd; - } - - static public HTableDescriptor atdToHTD(ATableDescriptor atd) throws IOException, AIllegalArgument { - HTableDescriptor htd = new HTableDescriptor(Bytes.toBytes(atd.name)); - if (atd.families != null && atd.families.size() > 0) { - for (AFamilyDescriptor afd : atd.families) { - htd.addFamily(afdToHCD(afd)); - } - } - if (atd.maxFileSize != null) { - htd.setMaxFileSize(atd.maxFileSize); - } - if (atd.memStoreFlushSize != null) { - htd.setMemStoreFlushSize(atd.memStoreFlushSize); - } - if (atd.readOnly != null) { - htd.setReadOnly(atd.readOnly); - } - if (atd.deferredLogFlush != null) { - htd.setDeferredLogFlush(atd.deferredLogFlush); - } - if (atd.rootRegion != null || atd.metaRegion != null || atd.metaTable != null) { - AIllegalArgument aie = new AIllegalArgument(); - aie.message = new Utf8("Can't set root or meta flag on create table."); - throw aie; - } - return htd; - } - - // - // Family metadata - // - - static public AFamilyDescriptor hcdToAFD(HColumnDescriptor hcd) throws IOException { - AFamilyDescriptor afamily = new AFamilyDescriptor(); - afamily.name = ByteBuffer.wrap(hcd.getName()); - String compressionAlgorithm = hcd.getCompressionType().getName(); - if (compressionAlgorithm == "LZO") { - afamily.compression = ACompressionAlgorithm.LZO; - } else if (compressionAlgorithm == "GZ") { - afamily.compression = ACompressionAlgorithm.GZ; - } else { - afamily.compression = ACompressionAlgorithm.NONE; - } - afamily.maxVersions = hcd.getMaxVersions(); - afamily.blocksize = hcd.getBlocksize(); - afamily.inMemory = hcd.isInMemory(); - afamily.timeToLive = hcd.getTimeToLive(); - afamily.blockCacheEnabled = hcd.isBlockCacheEnabled(); - return afamily; - } - - static public HColumnDescriptor afdToHCD(AFamilyDescriptor afd) throws IOException { - HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes(afd.name)); - - ACompressionAlgorithm compressionAlgorithm = afd.compression; - if (compressionAlgorithm == ACompressionAlgorithm.LZO) { - hcd.setCompressionType(Compression.Algorithm.LZO); - } else if (compressionAlgorithm == ACompressionAlgorithm.GZ) { - hcd.setCompressionType(Compression.Algorithm.GZ); - } else { - hcd.setCompressionType(Compression.Algorithm.NONE); - } - - if (afd.maxVersions != null) { - 
hcd.setMaxVersions(afd.maxVersions); - } - - if (afd.blocksize != null) { - hcd.setBlocksize(afd.blocksize); - } - - if (afd.inMemory != null) { - hcd.setInMemory(afd.inMemory); - } - - if (afd.timeToLive != null) { - hcd.setTimeToLive(afd.timeToLive); - } - - if (afd.blockCacheEnabled != null) { - hcd.setBlockCacheEnabled(afd.blockCacheEnabled); - } - return hcd; - } - - // - // Single-Row DML (Get) - // - - // TODO(hammer): More concise idiom than if not null assign? - static public Get agetToGet(AGet aget) throws IOException { - Get get = new Get(Bytes.toBytes(aget.row)); - if (aget.columns != null) { - for (AColumn acolumn : aget.columns) { - if (acolumn.qualifier != null) { - get.addColumn(Bytes.toBytes(acolumn.family), Bytes.toBytes(acolumn.qualifier)); - } else { - get.addFamily(Bytes.toBytes(acolumn.family)); - } - } - } - if (aget.timestamp != null) { - get.setTimeStamp(aget.timestamp); - } - if (aget.timerange != null) { - get.setTimeRange(aget.timerange.minStamp, aget.timerange.maxStamp); - } - if (aget.maxVersions != null) { - get.setMaxVersions(aget.maxVersions); - } - return get; - } - - // TODO(hammer): Pick one: Timestamp or TimeStamp - static public AResult resultToAResult(Result result) { - AResult aresult = new AResult(); - byte[] row = result.getRow(); - aresult.row = ByteBuffer.wrap(row != null ? row : new byte[1]); - Schema s = Schema.createArray(AResultEntry.SCHEMA$); - GenericData.Array entries = null; - List resultKeyValues = result.list(); - if (resultKeyValues != null && resultKeyValues.size() > 0) { - entries = new GenericData.Array(resultKeyValues.size(), s); - for (KeyValue resultKeyValue : resultKeyValues) { - AResultEntry entry = new AResultEntry(); - entry.family = ByteBuffer.wrap(resultKeyValue.getFamily()); - entry.qualifier = ByteBuffer.wrap(resultKeyValue.getQualifier()); - entry.value = ByteBuffer.wrap(resultKeyValue.getValue()); - entry.timestamp = resultKeyValue.getTimestamp(); - entries.add(entry); - } - } else { - entries = new GenericData.Array(0, s); - } - aresult.entries = entries; - return aresult; - } - - // - // Single-Row DML (Put) - // - - static public Put aputToPut(APut aput) throws IOException { - Put put = new Put(Bytes.toBytes(aput.row)); - for (AColumnValue acv : aput.columnValues) { - if (acv.timestamp != null) { - put.add(Bytes.toBytes(acv.family), - Bytes.toBytes(acv.qualifier), - acv.timestamp, - Bytes.toBytes(acv.value)); - } else { - put.add(Bytes.toBytes(acv.family), - Bytes.toBytes(acv.qualifier), - Bytes.toBytes(acv.value)); - } - } - return put; - } - - // - // Single-Row DML (Delete) - // - - static public Delete adeleteToDelete(ADelete adelete) throws IOException { - Delete delete = new Delete(Bytes.toBytes(adelete.row)); - if (adelete.columns != null) { - for (AColumn acolumn : adelete.columns) { - if (acolumn.qualifier != null) { - delete.deleteColumns(Bytes.toBytes(acolumn.family), Bytes.toBytes(acolumn.qualifier)); - } else { - delete.deleteFamily(Bytes.toBytes(acolumn.family)); - } - } - } - return delete; - } - - // - // Multi-row DML (Scan) - // - - static public Scan ascanToScan(AScan ascan) throws IOException { - Scan scan = new Scan(); - if (ascan.startRow != null) { - scan.setStartRow(Bytes.toBytes(ascan.startRow)); - } - if (ascan.stopRow != null) { - scan.setStopRow(Bytes.toBytes(ascan.stopRow)); - } - if (ascan.columns != null) { - for (AColumn acolumn : ascan.columns) { - if (acolumn.qualifier != null) { - scan.addColumn(Bytes.toBytes(acolumn.family), Bytes.toBytes(acolumn.qualifier)); - } else { - 
scan.addFamily(Bytes.toBytes(acolumn.family)); - } - } - } - if (ascan.timestamp != null) { - scan.setTimeStamp(ascan.timestamp); - } - if (ascan.timerange != null) { - scan.setTimeRange(ascan.timerange.minStamp, ascan.timerange.maxStamp); - } - if (ascan.maxVersions != null) { - scan.setMaxVersions(ascan.maxVersions); - } - return scan; - } - - // TODO(hammer): Better to return null or empty array? - static public GenericArray resultsToAResults(Result[] results) { - Schema s = Schema.createArray(AResult.SCHEMA$); - GenericData.Array aresults = null; - if (results != null && results.length > 0) { - aresults = new GenericData.Array(results.length, s); - for (Result result : results) { - aresults.add(resultToAResult(result)); - } - } else { - aresults = new GenericData.Array(0, s); - } - return aresults; - } -} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/avro/package.html hbase-server/src/main/java/org/apache/hadoop/hbase/avro/package.html deleted file mode 100644 index 3166aca..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/avro/package.html +++ /dev/null @@ -1,72 +0,0 @@ - - - - - - - -Provides an HBase Avro service. - -This directory contains an Avro interface definition file for an HBase RPC -service and a Java server implementation. - -This server has been Deprecated. - -

-<h2>What is Avro?</h2>
-
-<p>Avro is a data serialization and RPC system. For more, see the
-current specification.</p>
-
-<h2>Description</h2>
-
-<p>The HBase API is defined in the Avro IDL
-file hbase.avdl. A server-side implementation of the API is in
-org.apache.hadoop.hbase.avro.AvroServer. The generated interfaces,
-types, and RPC utility files are checked into SVN under the
-org.apache.hadoop.hbase.avro.generated directory.
-</p>
-
-<p>The files were generated by running the commands:
-<pre>
-  java -jar avro-tools-1.4.1.jar idl hbase.avdl hbase.avpr
-  java -jar avro-tools-1.4.1.jar compile protocol hbase.avpr $HBASE_HOME/src/main/java
-</pre>
-</p>
-
-<p>The 'avro-tools-x.y.z.jar' jarfile is an Avro utility, and it is
-distributed as a part of the Avro package. Additionally, specific
-language runtime libraries are a part of the Avro package. A version of the
-Java runtime is listed as a dependency in Maven.
-</p>
-
-<p>To start AvroServer, use:
-<pre>
-  ./bin/hbase avro start [--port=PORT]
-</pre>
-The default port is 9090.
-</p>
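(For context, a minimal sketch of how a client reached this gateway while it existed, assuming the generated org.apache.hadoop.hbase.avro.generated.HBase interface and the Avro 1.5.x IPC classes that AvroServer imports above; the class name and host are illustrative, not part of this patch.)

  import java.net.URL;

  import org.apache.avro.ipc.HttpTransceiver;
  import org.apache.avro.ipc.specific.SpecificRequestor;

  import org.apache.hadoop.hbase.avro.generated.HBase;

  public class AvroClientSketch {
    public static void main(String[] args) throws Exception {
      // Reach the gateway started with "./bin/hbase avro start" (default port 9090).
      HttpTransceiver transceiver = new HttpTransceiver(new URL("http://localhost:9090"));
      // Bind the generated HBase protocol interface to the HTTP transport.
      HBase client = SpecificRequestor.getClient(HBase.class, transceiver);
      // Any protocol message can now be issued as a plain method call.
      CharSequence version = client.getHBaseVersion();
      System.out.println("HBase version: " + version);
      transceiver.close();
    }
  }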
-
-<p>To stop, use:
-<pre>
-  ./bin/hbase-daemon.sh stop avro
-</pre>
-</p>
- - diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/avro/TestAvroServer.java hbase-server/src/test/java/org/apache/hadoop/hbase/avro/TestAvroServer.java deleted file mode 100644 index 8915f6f..0000000 --- hbase-server/src/test/java/org/apache/hadoop/hbase/avro/TestAvroServer.java +++ /dev/null @@ -1,239 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.avro; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.nio.ByteBuffer; - -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericArray; -import org.apache.avro.generic.GenericData; -import org.apache.hadoop.hbase.HBaseTestingUtility; -import org.apache.hadoop.hbase.MediumTests; -import org.apache.hadoop.hbase.avro.generated.AColumn; -import org.apache.hadoop.hbase.avro.generated.AColumnValue; -import org.apache.hadoop.hbase.avro.generated.AFamilyDescriptor; -import org.apache.hadoop.hbase.avro.generated.AGet; -import org.apache.hadoop.hbase.avro.generated.APut; -import org.apache.hadoop.hbase.avro.generated.ATableDescriptor; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.Threads; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -/** - * Unit testing for AvroServer.HBaseImpl, a part of the - * org.apache.hadoop.hbase.avro package. - */ -@Category(MediumTests.class) -public class TestAvroServer { - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - - // Static names for tables, columns, rows, and values - // TODO(hammer): Better style to define these in test method? - private static ByteBuffer tableAname = ByteBuffer.wrap(Bytes.toBytes("tableA")); - private static ByteBuffer tableBname = ByteBuffer.wrap(Bytes.toBytes("tableB")); - private static ByteBuffer familyAname = ByteBuffer.wrap(Bytes.toBytes("FamilyA")); - private static ByteBuffer qualifierAname = ByteBuffer.wrap(Bytes.toBytes("QualifierA")); - private static ByteBuffer rowAname = ByteBuffer.wrap(Bytes.toBytes("RowA")); - private static ByteBuffer valueA = ByteBuffer.wrap(Bytes.toBytes("ValueA")); - - /** - * @throws java.lang.Exception - */ - @BeforeClass - public static void setUpBeforeClass() throws Exception { - TEST_UTIL.startMiniCluster(); - } - - /** - * @throws java.lang.Exception - */ - @AfterClass - public static void tearDownAfterClass() throws Exception { - TEST_UTIL.shutdownMiniCluster(); - } - - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - // Nothing to do. 
- } - - /** - * @throws java.lang.Exception - */ - @After - public void tearDown() throws Exception { - // Nothing to do. - } - - /** - * Tests for creating, enabling, disabling, modifying, and deleting tables. - * - * @throws Exception - */ - @Test (timeout=300000) - public void testTableAdminAndMetadata() throws Exception { - AvroServer.HBaseImpl impl = - new AvroServer.HBaseImpl(TEST_UTIL.getConfiguration()); - - assertEquals(impl.listTables().size(), 0); - - ATableDescriptor tableA = new ATableDescriptor(); - tableA.name = tableAname; - impl.createTable(tableA); - assertEquals(impl.listTables().size(), 1); - assertTrue(impl.isTableEnabled(tableAname)); - assertTrue(impl.tableExists(tableAname)); - - ATableDescriptor tableB = new ATableDescriptor(); - tableB.name = tableBname; - impl.createTable(tableB); - assertEquals(impl.listTables().size(), 2); - - impl.disableTable(tableBname); - assertFalse(impl.isTableEnabled(tableBname)); - - impl.deleteTable(tableBname); - assertEquals(impl.listTables().size(), 1); - - impl.disableTable(tableAname); - assertFalse(impl.isTableEnabled(tableAname)); - - long oldMaxFileSize = impl.describeTable(tableAname).maxFileSize; - tableA.maxFileSize = 123456L; - impl.modifyTable(tableAname, tableA); - - // It can take a while for the change to take effect. Wait here a while. - while(impl.describeTable(tableAname).maxFileSize == oldMaxFileSize) { - Threads.sleep(100); - } - - assertTrue(impl.describeTable(tableAname).maxFileSize == 123456L); - assertEquals(123456L, (long) impl.describeTable(tableAname).maxFileSize); -/* DISABLED FOR NOW TILL WE HAVE BETTER DISABLE/ENABLE - impl.enableTable(tableAname); - assertTrue(impl.isTableEnabled(tableAname)); - - impl.disableTable(tableAname); - */ - impl.deleteTable(tableAname); - } - - /** - * Tests for creating, modifying, and deleting column families. - * - * @throws Exception - */ - @Test - public void testFamilyAdminAndMetadata() throws Exception { - AvroServer.HBaseImpl impl = - new AvroServer.HBaseImpl(TEST_UTIL.getConfiguration()); - - ATableDescriptor tableA = new ATableDescriptor(); - tableA.name = tableAname; - AFamilyDescriptor familyA = new AFamilyDescriptor(); - familyA.name = familyAname; - Schema familyArraySchema = Schema.createArray(AFamilyDescriptor.SCHEMA$); - GenericArray families = new GenericData.Array(1, familyArraySchema); - families.add(familyA); - tableA.families = families; - impl.createTable(tableA); - assertEquals(impl.describeTable(tableAname).families.size(), 1); - - impl.disableTable(tableAname); - assertFalse(impl.isTableEnabled(tableAname)); - - familyA.maxVersions = 123456; - impl.modifyFamily(tableAname, familyAname, familyA); - assertEquals((int) impl.describeFamily(tableAname, familyAname).maxVersions, 123456); - - impl.deleteFamily(tableAname, familyAname); - assertEquals(impl.describeTable(tableAname).families.size(), 0); - - impl.deleteTable(tableAname); - } - - /** - * Tests for adding, reading, and deleting data. 
- * - * @throws Exception - */ - @Test - public void testDML() throws Exception { - AvroServer.HBaseImpl impl = - new AvroServer.HBaseImpl(TEST_UTIL.getConfiguration()); - - ATableDescriptor tableA = new ATableDescriptor(); - tableA.name = tableAname; - AFamilyDescriptor familyA = new AFamilyDescriptor(); - familyA.name = familyAname; - Schema familyArraySchema = Schema.createArray(AFamilyDescriptor.SCHEMA$); - GenericArray families = new GenericData.Array(1, familyArraySchema); - families.add(familyA); - tableA.families = families; - impl.createTable(tableA); - assertEquals(impl.describeTable(tableAname).families.size(), 1); - - AGet getA = new AGet(); - getA.row = rowAname; - Schema columnsSchema = Schema.createArray(AColumn.SCHEMA$); - GenericArray columns = new GenericData.Array(1, columnsSchema); - AColumn column = new AColumn(); - column.family = familyAname; - column.qualifier = qualifierAname; - columns.add(column); - getA.columns = columns; - - assertFalse(impl.exists(tableAname, getA)); - - APut putA = new APut(); - putA.row = rowAname; - Schema columnValuesSchema = Schema.createArray(AColumnValue.SCHEMA$); - GenericArray columnValues = new GenericData.Array(1, columnValuesSchema); - AColumnValue acv = new AColumnValue(); - acv.family = familyAname; - acv.qualifier = qualifierAname; - acv.value = valueA; - columnValues.add(acv); - putA.columnValues = columnValues; - - impl.put(tableAname, putA); - assertTrue(impl.exists(tableAname, getA)); - - assertEquals(impl.get(tableAname, getA).entries.size(), 1); - - impl.disableTable(tableAname); - impl.deleteTable(tableAname); - } - - @org.junit.Rule - public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu = - new org.apache.hadoop.hbase.ResourceCheckerJUnitRule(); -} - diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/avro/TestAvroUtil.java hbase-server/src/test/java/org/apache/hadoop/hbase/avro/TestAvroUtil.java deleted file mode 100644 index 80c8591..0000000 --- hbase-server/src/test/java/org/apache/hadoop/hbase/avro/TestAvroUtil.java +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.avro; - - -import org.apache.hadoop.hbase.SmallTests; -import org.apache.hadoop.hbase.avro.generated.AResult; -import org.apache.hadoop.hbase.client.Result; -import org.junit.Assert; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.mockito.Mockito; - -@Category(SmallTests.class) -public class TestAvroUtil { - - - @Test - public void testGetEmpty() { - Result result = Mockito.mock(Result.class); - Mockito.when(result.getRow()).thenReturn(null); - //Get on a row, that does not exist, returns a result, - //whose row is null. 
- AResult aresult = AvroUtil.resultToAResult(result); - Assert.assertNotNull(aresult); - } - - - @org.junit.Rule - public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu = - new org.apache.hadoop.hbase.ResourceCheckerJUnitRule(); -} - diff --git pom.xml pom.xml index 7a8a643..13903a6 100644 --- pom.xml +++ pom.xml @@ -360,7 +360,7 @@ ${skipJavadoc} true - org.apache.hadoop.hbase.protobuf.generated.*:org.apache.hadoop.hbase.avro.generated.*:org.apache.hadoop.hbase.thrift.generated:org.apache.hadoop.hbase.rest.generated + org.apache.hadoop.hbase.protobuf.generated.*:org.apache.hadoop.hbase.thrift.generated:org.apache.hadoop.hbase.rest.generated true 2g true @@ -482,11 +482,6 @@ - org.apache.avro - avro-maven-plugin - ${avro.version} - - org.codehaus.mojo build-helper-maven-plugin 1.5 @@ -797,7 +792,6 @@ 2.0.0-alpha 1.0.3 - 1.5.3 1.2 1.4 3.1 @@ -1036,7 +1030,7 @@ servlet-api-2.5 ${jetty.jspapi.version} -
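For reference, the scanner messages deleted above (scannerOpen, scannerGetRows, scannerClose) composed into the following client-side flow. This is a sketch against the removed generated API, assuming the Avro 1.5.x specific mappings (bytes to ByteBuffer, arrays to GenericArray); the table name and batch size are illustrative.

  import java.net.URL;
  import java.nio.ByteBuffer;

  import org.apache.avro.generic.GenericArray;
  import org.apache.avro.ipc.HttpTransceiver;
  import org.apache.avro.ipc.specific.SpecificRequestor;

  import org.apache.hadoop.hbase.avro.generated.AResult;
  import org.apache.hadoop.hbase.avro.generated.AScan;
  import org.apache.hadoop.hbase.avro.generated.HBase;

  public class AvroScanSketch {
    public static void main(String[] args) throws Exception {
      HBase client = SpecificRequestor.getClient(HBase.class,
          new HttpTransceiver(new URL("http://localhost:9090")));

      // An empty AScan scans the whole table; all of its fields are nullable unions.
      AScan ascan = new AScan();
      int scannerId = client.scannerOpen(ByteBuffer.wrap("tableA".getBytes("UTF-8")), ascan);
      try {
        // Pull rows in batches until the server-side scanner is exhausted.
        GenericArray<AResult> batch;
        while ((batch = client.scannerGetRows(scannerId, 100)).size() > 0) {
          for (AResult result : batch) {
            System.out.println("got row with " + result.entries.size() + " cells");
          }
        }
      } finally {
        // Always release the scanner id tracked in the server's scannerMap.
        client.scannerClose(scannerId);
      }
    }
  }

Clients of the removed gateway move to the Thrift or REST gateways retained in bin/hbase above (e.g. ./bin/hbase thrift start).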