diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
index a933f41..3c307fb 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
@@ -187,5 +187,17 @@
       <artifactId>leveldbjni-all</artifactId>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <version>0.98.0-hadoop2</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-testing-util</artifactId>
+      <version>0.98.0-hadoop2</version>
+      <scope>test</scope>
+    </dependency>
+
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/HBaseTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/HBaseTimelineStore.java
new file mode 100644
index 0000000..a0cbfdb
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/HBaseTimelineStore.java
@@ -0,0 +1,720 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.NavigableMap;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeMap;
+
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.HBaseTimelineStoreUtil.UTF8_CHARSET;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.readReverseOrderedLong;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.HBaseTimelineStoreUtil.createPrimaryFilterColumnQualifier;
+
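+/**
+ * A timeline store backed by Apache HBase. Data is split across three
+ * tables: a start time table recording each entity's start time, an entity
+ * table keyed by entity type and reverse-ordered start time, and an index
+ * table keyed by primary filter name and value for filtered lookups. Old
+ * entries are aged off through the column family time-to-live.
+ */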
+public class HBaseTimelineStore extends AbstractService
+ implements TimelineStore {
+
+ static final Log LOG = LogFactory.getLog(HBaseTimelineStore.class);
+
+ private static final String TABLE_NAME_PREFIX = "timeline.";
+
+  /**
+   * Default age-off time is one week (HBase TTL values are in seconds).
+   */
+ private static final int DEFAULT_TTL = 60 * 60 * 24 * 7;
+ public static final String HBASE_TTL_PROPERTY =
+ YarnConfiguration.TIMELINE_SERVICE_PREFIX + "hbase-ttl";
+ public static final String HBASE_MASTER_PROPERTY =
+ YarnConfiguration.TIMELINE_SERVICE_PREFIX + "hbase-master";
+
+ private static final String START_TIME_TABLE = TABLE_NAME_PREFIX +
+ "starttime";
+ private static final String ENTITY_TABLE = TABLE_NAME_PREFIX + "entity";
+ private static final String INDEX_TABLE = TABLE_NAME_PREFIX + "index";
+
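+  // Column family names are single bytes, presumably to keep per-cell
+  // storage overhead small.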
+ private static final byte[] START_TIME_COLUMN = "s".getBytes(UTF8_CHARSET);
+
+ private static final byte[] EVENTS_COLUMN = "e".getBytes(UTF8_CHARSET);
+ private static final byte[] PRIMARY_FILTERS_COLUMN =
+ "f".getBytes(UTF8_CHARSET);
+ private static final byte[] OTHER_INFO_COLUMN = "i".getBytes(UTF8_CHARSET);
+ private static final byte[] RELATED_ENTITIES_COLUMN =
+ "r".getBytes(UTF8_CHARSET);
+
+ private static final byte[] EMPTY_BYTES = new byte[0];
+
+ private HConnection connection;
+
+ public HBaseTimelineStore() {
+ super(HBaseTimelineStore.class.getName());
+ }
+
+ private HColumnDescriptor createFamily(byte[] b, int ttl) {
+ HColumnDescriptor column = new HColumnDescriptor(b);
+ column.setTimeToLive(ttl);
+ return column;
+ }
+
+ @Override
+ protected void serviceInit(Configuration conf) throws Exception {
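+    // Create the backing tables on first startup; creation is skipped when a
+    // table already exists, so re-initialization is harmless.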
+ HBaseAdmin hbase = initHBase(conf);
+ int ttl = conf.getInt(HBASE_TTL_PROPERTY, DEFAULT_TTL);
+ TableName startTimeTableName = TableName.valueOf(START_TIME_TABLE);
+ if (!hbase.tableExists(startTimeTableName)) {
+ HTableDescriptor desc = new HTableDescriptor(startTimeTableName);
+ desc.addFamily(createFamily(START_TIME_COLUMN, ttl));
+ hbase.createTable(desc);
+ LOG.info("Created hbase table " + START_TIME_TABLE);
+ }
+ TableName entityTableName = TableName.valueOf(ENTITY_TABLE);
+ if (!hbase.tableExists(entityTableName)) {
+ HTableDescriptor desc = new HTableDescriptor(entityTableName);
+ desc.addFamily(createFamily(EVENTS_COLUMN, ttl));
+ desc.addFamily(createFamily(PRIMARY_FILTERS_COLUMN, ttl));
+ desc.addFamily(createFamily(OTHER_INFO_COLUMN, ttl));
+ desc.addFamily(createFamily(RELATED_ENTITIES_COLUMN, ttl));
+ hbase.createTable(desc);
+ LOG.info("Created hbase table " + ENTITY_TABLE);
+ }
+ TableName indexTableName = TableName.valueOf(INDEX_TABLE);
+ if (!hbase.tableExists(indexTableName)) {
+ HTableDescriptor desc = new HTableDescriptor(indexTableName);
+ desc.addFamily(createFamily(EVENTS_COLUMN, ttl));
+ desc.addFamily(createFamily(PRIMARY_FILTERS_COLUMN, ttl));
+ desc.addFamily(createFamily(OTHER_INFO_COLUMN, ttl));
+ desc.addFamily(createFamily(RELATED_ENTITIES_COLUMN, ttl));
+ hbase.createTable(desc);
+ LOG.info("Created hbase table " + INDEX_TABLE);
+ }
+ super.serviceInit(conf);
+ }
+
+ @Override
+ protected void serviceStop() throws Exception {
+ IOUtils.cleanup(LOG, connection);
+ super.serviceStop();
+ }
+
+ @Override
+ public TimelineEntities getEntities(String entityType, Long limit,
+ Long starttime, Long endtime, String fromId, Long fromTs,
+      NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
+      EnumSet<Field> fieldsToRetrieve) throws IOException {
+ //TODO: fromId and fromTs not implemented
+
+ if (endtime == null) {
+ // if end time is null, place no restriction on end time
+ endtime = Long.MAX_VALUE;
+ }
+    // using the end time, construct the first row key that the scan seeks to
+ byte[] firstRow = HBaseTimelineStoreUtil.createEntityStartOrEndRow(
+ entityType, writeReverseOrderedLong(endtime));
+ byte[] lastRow = HBaseTimelineStoreUtil.createEntityTypeEndRow(entityType);
+ if (starttime != null) {
+ // if start time is not null, set a last key that will not be
+ // iterated past
+ lastRow = HBaseTimelineStoreUtil.createEntityStartOrEndRow(entityType,
+ writeReverseOrderedLong(starttime));
+ }
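+    // Start times are serialized with writeReverseOrderedLong, so row keys
+    // sort in descending time order: the scan begins at the key derived from
+    // endtime (the smaller row key) and stops at the key derived from
+    // starttime.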
+ if (limit == null) {
+ // if limit is not specified, use the default
+ limit = DEFAULT_LIMIT;
+ }
+
+ int entityOffset = 0;
+ HTableInterface table = null;
+ ResultScanner rs = null;
+ try {
+ if (primaryFilter == null) {
+ table = getTable(ENTITY_TABLE);
+ } else {
+ table = getTable(INDEX_TABLE);
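+        // An index row prepends the primary filter name and value to the
+        // entity row; record the length of that prefix so the entity portion
+        // can be parsed back out of each result row below.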
+ entityOffset = firstRow.length;
+ firstRow = HBaseTimelineStoreUtil.createIndexRow(
+ primaryFilter.getName(), primaryFilter.getValue(), firstRow);
+ entityOffset = firstRow.length - entityOffset;
+ lastRow = HBaseTimelineStoreUtil.createIndexRow(
+ primaryFilter.getName(), primaryFilter.getValue(), lastRow);
+ }
+
+ Scan scan = new Scan(firstRow, lastRow);
+ if (fieldsToRetrieve == null) {
+ fieldsToRetrieve = EnumSet.allOf(Field.class);
+ }
+ if (fieldsToRetrieve.contains(Field.EVENTS) ||
+ fieldsToRetrieve.contains(Field.LAST_EVENT_ONLY)) {
+ scan.addFamily(EVENTS_COLUMN);
+ }
+ if (fieldsToRetrieve.contains(Field.RELATED_ENTITIES)) {
+ scan.addFamily(RELATED_ENTITIES_COLUMN);
+ }
+ if (secondaryFilters != null ||
+ fieldsToRetrieve.contains(Field.PRIMARY_FILTERS)) {
+ scan.addFamily(PRIMARY_FILTERS_COLUMN);
+ }
+ if (secondaryFilters != null ||
+ fieldsToRetrieve.contains(Field.OTHER_INFO)) {
+ scan.addFamily(OTHER_INFO_COLUMN);
+ }
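+      // When secondary filters are present, the primary filter and other
+      // info families are fetched regardless of fieldsToRetrieve, since the
+      // client-side filtering below must inspect both.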
+
+ /*
+ //TODO: server-side filtering not implemented
+ if (secondaryFilters != null) {
+ FilterList filterList = null;
+ if (secondaryFilters.size() == 1) {
+ for (NameValuePair filter : secondaryFilters) {
+ filterList = buildFilter(filter);
+ }
+ } else {
+ filterList = new FilterList(Operator.MUST_PASS_ALL);
+ for (NameValuePair filter : secondaryFilters) {
+ filterList.addFilter(buildFilter(filter));
+ }
+ }
+ System.out.println("filter list "+filterList);
+ scan.setFilter(filterList);
+ }
+ */
+
+ TimelineEntities entities = new TimelineEntities();
+ rs = table.getScanner(scan);
+ for (Result result = rs.next(); result != null; result = rs.next()) {
+ byte[] row = result.getRow();
+ TimelineEntity entity = HBaseTimelineStoreUtil.parseEntityRow(row,
+ entityOffset, row.length - entityOffset);
+ if (getEntityFromResult(entity, result, fieldsToRetrieve)) {
+ //TODO: remove client-side filtering once server-side is working
+ // determine if the retrieved entity matches the provided secondary
+ // filters, and if so add it to the list of entities to return
+ boolean filterPassed = true;
+ if (secondaryFilters != null) {
+ for (NameValuePair filter : secondaryFilters) {
+ // check other info for filtered field
+ Object v = entity.getOtherInfo().get(filter.getName());
+ if (v == null) {
+ // if field is not found in other info, check in primary filters
+ Set