diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/HBaseTimelineFilterUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/HBaseTimelineFilterUtils.java
new file mode 100644
index 0000000..97cfcf7
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/HBaseTimelineFilterUtils.java
@@ -0,0 +1,298 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.reader;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.FamilyFilter;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
+import org.apache.hadoop.hbase.filter.QualifierFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix;
+
+/**
+ * Set of utility methods used by timeline filter classes.
+ */
+public final class HBaseTimelineFilterUtils {
+
+  private static final Log LOG =
+      LogFactory.getLog(HBaseTimelineFilterUtils.class);
+
+  private HBaseTimelineFilterUtils() {
+  }
+
+  /**
+   * Returns the equivalent HBase filter list's {@link Operator}.
+   *
+   * @param op timeline filter list operator.
+   * @return HBase filter list's Operator.
+   */
+  private static Operator getHBaseOperator(TimelineFilterList.Operator op) {
+    switch (op) {
+    case AND:
+      return Operator.MUST_PASS_ALL;
+    case OR:
+      return Operator.MUST_PASS_ONE;
+    default:
+      throw new IllegalArgumentException("Invalid operator");
+    }
+  }
+
+  /**
+   * Returns the equivalent HBase compare filter's {@link CompareOp}.
+   *
+   * @param op timeline compare op.
+   * @return HBase compare filter's CompareOp.
+   */
+  private static CompareOp getHBaseCompareOp(TimelineCompareOp op) {
+    switch (op) {
+    case LESS_THAN:
+      return CompareOp.LESS;
+    case LESS_OR_EQUAL:
+      return CompareOp.LESS_OR_EQUAL;
+    case EQUAL:
+      return CompareOp.EQUAL;
+    case NOT_EQUAL:
+      return CompareOp.NOT_EQUAL;
+    case GREATER_OR_EQUAL:
+      return CompareOp.GREATER_OR_EQUAL;
+    case GREATER_THAN:
+      return CompareOp.GREATER;
+    default:
+      throw new IllegalArgumentException("Invalid compare operator");
+    }
+  }
+
+  /**
+   * Converts a {@link TimelinePrefixFilter} to an equivalent HBase
+   * {@link QualifierFilter}.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param colPrefix column prefix from which the qualifier prefix bytes are
+   *     generated.
+   * @param filter timeline prefix filter to be converted.
+   * @return a {@link QualifierFilter} object.
+   */
+  private static <T> Filter createHBaseColQualPrefixFilter(
+      ColumnPrefix<T> colPrefix, TimelinePrefixFilter filter) {
+    return new QualifierFilter(getHBaseCompareOp(filter.getCompareOp()),
+        new BinaryPrefixComparator(
+            colPrefix.getColumnPrefixBytes(filter.getPrefix())));
+  }
+
+  /**
+   * Creates an HBase {@link QualifierFilter} for the passed column prefix and
+   * compare op.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param compareOp compare op.
+   * @param columnPrefix column prefix.
+   * @return a column qualifier filter.
+   */
+  public static <T> Filter createHBaseQualifierFilter(CompareOp compareOp,
+      ColumnPrefix<T> columnPrefix) {
+    return new QualifierFilter(compareOp,
+        new BinaryPrefixComparator(
+            columnPrefix.getColumnPrefixBytes("")));
+  }
+
+  /**
+   * Creates filters for confs or metrics to retrieve. This list includes a
+   * configs/metrics family filter and relevant filters for confs/metrics to
+   * retrieve, if present.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param confsOrMetricToRetrieve configs/metrics to retrieve.
+   * @param columnFamily config or metric column family.
+   * @param columnPrefix config or metric column prefix.
+   * @return a filter list.
+   * @throws IOException if any problem occurs while creating the filters.
+   */
+  public static <T> Filter createFilterForConfsOrMetricsToRetrieve(
+      TimelineFilterList confsOrMetricToRetrieve, ColumnFamily<T> columnFamily,
+      ColumnPrefix<T> columnPrefix) throws IOException {
+    Filter familyFilter = new FamilyFilter(CompareOp.EQUAL,
+        new BinaryComparator(columnFamily.getBytes()));
+    if (confsOrMetricToRetrieve != null &&
+        !confsOrMetricToRetrieve.getFilterList().isEmpty()) {
+      // If confsOrMetricsToRetrieve are specified, create a filter list based
+      // on it and the family filter.
+      FilterList filter = new FilterList(familyFilter);
+      filter.addFilter(
+          createHBaseFilterList(columnPrefix, confsOrMetricToRetrieve));
+      return filter;
+    } else {
+      // Only the family filter needs to be added.
+      return familyFilter;
+    }
+  }
+
+  /**
+   * Creates two HBase {@link SingleColumnValueFilter} filters for the
+   * specified value range, represented by a start and an end value, and wraps
+   * them inside a filter list. Start and end values must not be null.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param column Column for which single column value filter is to be
+   *     created.
+   * @param startValue Start value.
+   * @param endValue End value.
+   * @return 2 single column value filters wrapped in a filter list.
+   * @throws IOException if any problem is encountered while encoding value.
+   */
+  public static <T> FilterList createSingleColValueFiltersByRange(
+      Column<T> column, Object startValue, Object endValue) throws IOException {
+    FilterList list = new FilterList();
+    Filter singleColValFilterStart = createHBaseSingleColValueFilter(
+        column.getColumnFamilyBytes(), column.getColumnQualifierBytes(),
+        column.getValueConverter().encodeValue(startValue),
+        CompareOp.GREATER_OR_EQUAL, true);
+    list.addFilter(singleColValFilterStart);
+
+    Filter singleColValFilterEnd = createHBaseSingleColValueFilter(
+        column.getColumnFamilyBytes(), column.getColumnQualifierBytes(),
+        column.getValueConverter().encodeValue(endValue),
+        CompareOp.LESS_OR_EQUAL, true);
+    list.addFilter(singleColValFilterEnd);
+    return list;
+  }
+
+  /**
+   * Creates an HBase {@link SingleColumnValueFilter}.
+   *
+   * @param columnFamily Column Family represented as bytes.
+   * @param columnQualifier Column Qualifier represented as bytes.
+   * @param value Value.
+   * @param compareOp Compare operator.
+   * @param filterIfMissing This flag decides if we should filter the row if
+   *     the specified column is missing. This is based on the filter's
+   *     keyMustExist field.
+   * @return a {@link SingleColumnValueFilter} object.
+   * @throws IOException if any problem occurs while creating the filter.
+   */
+  private static SingleColumnValueFilter createHBaseSingleColValueFilter(
+      byte[] columnFamily, byte[] columnQualifier, byte[] value,
+      CompareOp compareOp, boolean filterIfMissing) throws IOException {
+    SingleColumnValueFilter singleColValFilter =
+        new SingleColumnValueFilter(columnFamily, columnQualifier, compareOp,
+            new BinaryComparator(value));
+    singleColValFilter.setLatestVersionOnly(true);
+    singleColValFilter.setFilterIfMissing(filterIfMissing);
+    return singleColValFilter;
+  }
+
+  /**
+   * Fetches columns from a filter list containing exists and multivalue
+   * equality filters. This is done to fetch only the required columns from
+   * the back-end and then match event filters or relationships in the reader.
+   *
+   * @param filterList filter list.
+   * @return set of columns.
+   */
+  public static Set<String> fetchColumnsFromFilterList(
+      TimelineFilterList filterList) {
+    Set<String> strSet = new HashSet<String>();
+    for (TimelineFilter filter : filterList.getFilterList()) {
+      switch (filter.getFilterType()) {
+      case LIST:
+        strSet.addAll(fetchColumnsFromFilterList((TimelineFilterList)filter));
+        break;
+      case KEY_VALUES:
+        strSet.add(((TimelineKeyValuesFilter)filter).getKey());
+        break;
+      case EXISTS:
+        strSet.add(((TimelineExistsFilter)filter).getValue());
+        break;
+      default:
+        LOG.info("Unexpected filter type " + filter.getFilterType());
+        break;
+      }
+    }
+    return strSet;
+  }
+
+  /**
+   * Creates an equivalent HBase {@link FilterList} from a
+   * {@link TimelineFilterList} while converting the different timeline
+   * filters (of type {@link TimelineFilter}) into their equivalent HBase
+   * filters.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param colPrefix column prefix which will be used for conversion.
+   * @param filterList timeline filter list which has to be converted.
+   * @return A {@link FilterList} object.
+   * @throws IOException if any problem occurs while creating the filter list.
+   */
+  public static <T> FilterList createHBaseFilterList(ColumnPrefix<T> colPrefix,
+      TimelineFilterList filterList) throws IOException {
+    FilterList list =
+        new FilterList(getHBaseOperator(filterList.getOperator()));
+    for (TimelineFilter filter : filterList.getFilterList()) {
+      switch (filter.getFilterType()) {
+      case LIST:
+        list.addFilter(createHBaseFilterList(colPrefix,
+            (TimelineFilterList)filter));
+        break;
+      case PREFIX:
+        list.addFilter(createHBaseColQualPrefixFilter(colPrefix,
+            (TimelinePrefixFilter)filter));
+        break;
+      case COMPARE:
+        TimelineCompareFilter compareFilter = (TimelineCompareFilter)filter;
+        list.addFilter(
+            createHBaseSingleColValueFilter(
+                colPrefix.getColumnFamilyBytes(),
+                colPrefix.getColumnPrefixBytes(compareFilter.getKey()),
+                colPrefix.getValueConverter().
+                    encodeValue(compareFilter.getValue()),
+                getHBaseCompareOp(compareFilter.getCompareOp()),
+                compareFilter.getKeyMustExist()));
+        break;
+      case KEY_VALUE:
+        TimelineKeyValueFilter kvFilter = (TimelineKeyValueFilter)filter;
+        list.addFilter(
+            createHBaseSingleColValueFilter(
+                colPrefix.getColumnFamilyBytes(),
+                colPrefix.getColumnPrefixBytes(kvFilter.getKey()),
+                colPrefix.getValueConverter().encodeValue(kvFilter.getValue()),
+                getHBaseCompareOp(kvFilter.getCompareOp()),
+                kvFilter.getKeyMustExist()));
+        break;
+      default:
+        LOG.info("Unexpected filter type " + filter.getFilterType());
+        break;
+      }
+    }
+    return list;
+  }
+}
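
Reviewer note (not part of the patch): createHBaseFilterList() above is the conversion entry point. LIST nodes recurse, PREFIX nodes become QualifierFilters, and COMPARE/KEY_VALUE nodes become SingleColumnValueFilters, with the list operator mapped by getHBaseOperator(). A minimal sketch of a caller follows; ApplicationColumnPrefix.METRIC is the real constant used by ApplicationEntityReader further down, while the metric names, the thresholds, and the TimelineCompareFilter(op, key, value) constructor shape are assumptions for illustration only.

    import java.io.IOException;

    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.yarn.server.timelineservice.reader.HBaseTimelineFilterUtils;
    import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
    import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
    import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
    import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
    import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;

    public class FilterConversionSketch {
      public static FilterList metricFiltersToHBase() throws IOException {
        // "metric1 > 100000 OR metric2 >= 1000"; the OR maps to
        // FilterList.Operator.MUST_PASS_ONE via getHBaseOperator().
        TimelineFilterList metricFilters = new TimelineFilterList(Operator.OR,
            new TimelineCompareFilter(TimelineCompareOp.GREATER_THAN,
                "metric1", 100000L),
            new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL,
                "metric2", 1000L));
        // Each compare filter becomes a SingleColumnValueFilter against the
        // metric column prefix, with the value encoded by the column's
        // ValueConverter.
        return HBaseTimelineFilterUtils.createHBaseFilterList(
            ApplicationColumnPrefix.METRIC, metricFilters);
      }
    }
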
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineFilterUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineFilterUtils.java
deleted file mode 100644
index cccae26..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineFilterUtils.java
+++ /dev/null
@@ -1,290 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.timelineservice.reader.filter;
-
-import java.io.IOException;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
-import org.apache.hadoop.hbase.filter.FamilyFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.FilterList;
-import org.apache.hadoop.hbase.filter.FilterList.Operator;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix;
-import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-
-/**
- * Set of utility methods used by timeline filter classes.
- */
-public final class TimelineFilterUtils {
-
-  private static final Log LOG = LogFactory.getLog(TimelineFilterUtils.class);
-
-  private TimelineFilterUtils() {
-  }
-
-  /**
-   * Returns the equivalent HBase filter list's {@link Operator}.
-   *
-   * @param op timeline filter list operator.
-   * @return HBase filter list's Operator.
-   */
-  private static Operator getHBaseOperator(TimelineFilterList.Operator op) {
-    switch (op) {
-    case AND:
-      return Operator.MUST_PASS_ALL;
-    case OR:
-      return Operator.MUST_PASS_ONE;
-    default:
-      throw new IllegalArgumentException("Invalid operator");
-    }
-  }
-
-  /**
-   * Returns the equivalent HBase compare filter's {@link CompareOp}.
-   *
-   * @param op timeline compare op.
-   * @return HBase compare filter's CompareOp.
-   */
-  private static CompareOp getHBaseCompareOp(
-      TimelineCompareOp op) {
-    switch (op) {
-    case LESS_THAN:
-      return CompareOp.LESS;
-    case LESS_OR_EQUAL:
-      return CompareOp.LESS_OR_EQUAL;
-    case EQUAL:
-      return CompareOp.EQUAL;
-    case NOT_EQUAL:
-      return CompareOp.NOT_EQUAL;
-    case GREATER_OR_EQUAL:
-      return CompareOp.GREATER_OR_EQUAL;
-    case GREATER_THAN:
-      return CompareOp.GREATER;
-    default:
-      throw new IllegalArgumentException("Invalid compare operator");
-    }
-  }
-
-  /**
-   * Converts a {@link TimelinePrefixFilter} to an equivalent HBase
-   * {@link QualifierFilter}.
-   * @param colPrefix
-   * @param filter
-   * @return a {@link QualifierFilter} object
-   */
-  private static <T> Filter createHBaseColQualPrefixFilter(
-      ColumnPrefix<T> colPrefix, TimelinePrefixFilter filter) {
-    return new QualifierFilter(getHBaseCompareOp(filter.getCompareOp()),
-        new BinaryPrefixComparator(
-            colPrefix.getColumnPrefixBytes(filter.getPrefix())));
-  }
-
-  /**
-   * Create a HBase {@link QualifierFilter} for the passed column prefix and
-   * compare op.
-   *
-   * @param <T> Describes the type of column prefix.
-   * @param compareOp compare op.
-   * @param columnPrefix column prefix.
-   * @return a column qualifier filter.
-   */
-  public static <T> Filter createHBaseQualifierFilter(CompareOp compareOp,
-      ColumnPrefix<T> columnPrefix) {
-    return new QualifierFilter(compareOp,
-        new BinaryPrefixComparator(
-            columnPrefix.getColumnPrefixBytes("")));
-  }
-
-  /**
-   * Create filters for confs or metrics to retrieve. This list includes a
-   * configs/metrics family filter and relevant filters for confs/metrics to
-   * retrieve, if present.
-   *
-   * @param <T> Describes the type of column prefix.
-   * @param confsOrMetricToRetrieve configs/metrics to retrieve.
-   * @param columnFamily config or metric column family.
-   * @param columnPrefix config or metric column prefix.
-   * @return a filter list.
-   * @throws IOException if any problem occurs while creating the filters.
-   */
-  public static <T> Filter createFilterForConfsOrMetricsToRetrieve(
-      TimelineFilterList confsOrMetricToRetrieve, ColumnFamily<T> columnFamily,
-      ColumnPrefix<T> columnPrefix) throws IOException {
-    Filter familyFilter = new FamilyFilter(CompareOp.EQUAL,
-        new BinaryComparator(columnFamily.getBytes()));
-    if (confsOrMetricToRetrieve != null &&
-        !confsOrMetricToRetrieve.getFilterList().isEmpty()) {
-      // If confsOrMetricsToRetrive are specified, create a filter list based
-      // on it and family filter.
-      FilterList filter = new FilterList(familyFilter);
-      filter.addFilter(
-          createHBaseFilterList(columnPrefix, confsOrMetricToRetrieve));
-      return filter;
-    } else {
-      // Only the family filter needs to be added.
-      return familyFilter;
-    }
-  }
-
-  /**
-   * Create 2 HBase {@link SingleColumnValueFilter} filters for the specified
-   * value range represented by start and end value and wraps them inside a
-   * filter list. Start and end value should not be null.
-   *
-   * @param <T> Describes the type of column prefix.
-   * @param column Column for which single column value filter is to be
-   *     created.
-   * @param startValue Start value.
-   * @param endValue End value.
-   * @return 2 single column value filters wrapped in a filter list.
-   * @throws IOException if any problem is encountered while encoding value.
-   */
-  public static <T> FilterList createSingleColValueFiltersByRange(
-      Column<T> column, Object startValue, Object endValue) throws IOException {
-    FilterList list = new FilterList();
-    Filter singleColValFilterStart = createHBaseSingleColValueFilter(
-        column.getColumnFamilyBytes(), column.getColumnQualifierBytes(),
-        column.getValueConverter().encodeValue(startValue),
-        CompareOp.GREATER_OR_EQUAL, true);
-    list.addFilter(singleColValFilterStart);
-
-    Filter singleColValFilterEnd = createHBaseSingleColValueFilter(
-        column.getColumnFamilyBytes(), column.getColumnQualifierBytes(),
-        column.getValueConverter().encodeValue(endValue),
-        CompareOp.LESS_OR_EQUAL, true);
-    list.addFilter(singleColValFilterEnd);
-    return list;
-  }
-
-  /**
-   * Creates a HBase {@link SingleColumnValueFilter}.
-   *
-   * @param columnFamily Column Family represented as bytes.
-   * @param columnQualifier Column Qualifier represented as bytes.
-   * @param value Value.
-   * @param compareOp Compare operator.
-   * @param filterIfMissing This flag decides if we should filter the row if
-   *     the specified column is missing. This is based on the filter's
-   *     keyMustExist field.
-   * @return a {@link SingleColumnValueFilter} object
-   * @throws IOException
-   */
-  private static SingleColumnValueFilter createHBaseSingleColValueFilter(
-      byte[] columnFamily, byte[] columnQualifier, byte[] value,
-      CompareOp compareOp, boolean filterIfMissing) throws IOException {
-    SingleColumnValueFilter singleColValFilter =
-        new SingleColumnValueFilter(columnFamily, columnQualifier, compareOp,
-            new BinaryComparator(value));
-    singleColValFilter.setLatestVersionOnly(true);
-    singleColValFilter.setFilterIfMissing(filterIfMissing);
-    return singleColValFilter;
-  }
-
-  /**
-   * Fetch columns from filter list containing exists and multivalue equality
-   * filters. This is done to fetch only required columns from back-end and
-   * then match event filters or relationships in reader.
-   *
-   * @param filterList filter list.
-   * @return set of columns.
-   */
-  public static Set<String> fetchColumnsFromFilterList(
-      TimelineFilterList filterList) {
-    Set<String> strSet = new HashSet<String>();
-    for (TimelineFilter filter : filterList.getFilterList()) {
-      switch(filter.getFilterType()) {
-      case LIST:
-        strSet.addAll(fetchColumnsFromFilterList((TimelineFilterList)filter));
-        break;
-      case KEY_VALUES:
-        strSet.add(((TimelineKeyValuesFilter)filter).getKey());
-        break;
-      case EXISTS:
-        strSet.add(((TimelineExistsFilter)filter).getValue());
-        break;
-      default:
-        LOG.info("Unexpected filter type " + filter.getFilterType());
-        break;
-      }
-    }
-    return strSet;
-  }
-
-  /**
-   * Creates equivalent HBase {@link FilterList} from {@link TimelineFilterList}
-   * while converting different timeline filters(of type {@link TimelineFilter})
-   * into their equivalent HBase filters.
-   *
-   * @param <T> Describes the type of column prefix.
-   * @param colPrefix column prefix which will be used for conversion.
-   * @param filterList timeline filter list which has to be converted.
-   * @return A {@link FilterList} object.
-   * @throws IOException if any problem occurs while creating the filter list.
-   */
-  public static <T> FilterList createHBaseFilterList(ColumnPrefix<T> colPrefix,
-      TimelineFilterList filterList) throws IOException {
-    FilterList list =
-        new FilterList(getHBaseOperator(filterList.getOperator()));
-    for (TimelineFilter filter : filterList.getFilterList()) {
-      switch(filter.getFilterType()) {
-      case LIST:
-        list.addFilter(createHBaseFilterList(colPrefix,
-            (TimelineFilterList)filter));
-        break;
-      case PREFIX:
-        list.addFilter(createHBaseColQualPrefixFilter(colPrefix,
-            (TimelinePrefixFilter)filter));
-        break;
-      case COMPARE:
-        TimelineCompareFilter compareFilter = (TimelineCompareFilter)filter;
-        list.addFilter(
-            createHBaseSingleColValueFilter(
-                colPrefix.getColumnFamilyBytes(),
-                colPrefix.getColumnPrefixBytes(compareFilter.getKey()),
-                colPrefix.getValueConverter().
-                    encodeValue(compareFilter.getValue()),
-                getHBaseCompareOp(compareFilter.getCompareOp()),
-                compareFilter.getKeyMustExist()));
-        break;
-      case KEY_VALUE:
-        TimelineKeyValueFilter kvFilter = (TimelineKeyValueFilter)filter;
-        list.addFilter(
-            createHBaseSingleColValueFilter(
-                colPrefix.getColumnFamilyBytes(),
-                colPrefix.getColumnPrefixBytes(kvFilter.getKey()),
-                colPrefix.getValueConverter().encodeValue(kvFilter.getValue()),
-                getHBaseCompareOp(kvFilter.getCompareOp()),
-                kvFilter.getKeyMustExist()));
-        break;
-      default:
-        LOG.info("Unexpected filter type " + filter.getFilterType());
-        break;
-      }
-    }
-    return list;
-  }
-}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
index aa2bfda..fbfd900 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
@@ -40,7 +40,7 @@
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
-import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
+import org.apache.hadoop.yarn.server.timelineservice.reader.HBaseTimelineFilterUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumn;
 import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnFamily;
@@ -95,7 +95,7 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException {
     long createdTimeEnd = filters.getCreatedTimeEnd();
     if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
       listBasedOnFilters.addFilter(
-          TimelineFilterUtils.createSingleColValueFiltersByRange(
+          HBaseTimelineFilterUtils.createSingleColValueFiltersByRange(
               ApplicationColumn.CREATED_TIME, createdTimeBegin, createdTimeEnd));
     }
     // Create filter list based on metric filters and add it to
@@ -103,7 +103,7 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException {
     TimelineFilterList metricFilters = filters.getMetricFilters();
     if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
       listBasedOnFilters.addFilter(
-          TimelineFilterUtils.createHBaseFilterList(
+          HBaseTimelineFilterUtils.createHBaseFilterList(
              ApplicationColumnPrefix.METRIC, metricFilters));
     }
     // Create filter list based on config filters and add it to
@@ -111,14 +111,14 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException {
     TimelineFilterList configFilters = filters.getConfigFilters();
     if (configFilters != null && !configFilters.getFilterList().isEmpty()) {
       listBasedOnFilters.addFilter(
-          TimelineFilterUtils.createHBaseFilterList(
+          HBaseTimelineFilterUtils.createHBaseFilterList(
              ApplicationColumnPrefix.CONFIG, configFilters));
     }
     // Create filter list based on info filters and add it to listBasedOnFilters
     TimelineFilterList infoFilters = filters.getInfoFilters();
     if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) {
       listBasedOnFilters.addFilter(
-          TimelineFilterUtils.createHBaseFilterList(
+          HBaseTimelineFilterUtils.createHBaseFilterList(
              ApplicationColumnPrefix.INFO, infoFilters));
     }
     return listBasedOnFilters;
@@ -155,7 +155,7 @@ private FilterList createFilterListForColsOfInfoFamily()
     // with INFO column prefix.
     if (hasField(fieldsToRetrieve, Field.INFO)) {
       infoFamilyColsFilter.addFilter(
-          TimelineFilterUtils.createHBaseQualifierFilter(
+          HBaseTimelineFilterUtils.createHBaseQualifierFilter(
              CompareOp.EQUAL, ApplicationColumnPrefix.INFO));
     }
     TimelineFilterList relatesTo = getFilters().getRelatesTo();
@@ -163,7 +163,7 @@ private FilterList createFilterListForColsOfInfoFamily()
       // If RELATES_TO field has to be retrieved, add a filter for fetching
       // columns with RELATES_TO column prefix.
       infoFamilyColsFilter.addFilter(
-          TimelineFilterUtils.createHBaseQualifierFilter(
+          HBaseTimelineFilterUtils.createHBaseQualifierFilter(
              CompareOp.EQUAL, ApplicationColumnPrefix.RELATES_TO));
     } else if (relatesTo != null && !relatesTo.getFilterList().isEmpty()) {
       // Even if fields to retrieve does not contain RELATES_TO, we still
@@ -171,7 +171,7 @@ private FilterList createFilterListForColsOfInfoFamily()
       // relatesTo filters are specified. relatesTo filters will then be
       // matched after fetching rows from HBase.
       Set<String> relatesToCols =
-          TimelineFilterUtils.fetchColumnsFromFilterList(relatesTo);
+          HBaseTimelineFilterUtils.fetchColumnsFromFilterList(relatesTo);
       infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers(
           ApplicationColumnPrefix.RELATES_TO, relatesToCols));
     }
@@ -180,7 +180,7 @@ private FilterList createFilterListForColsOfInfoFamily()
       // If IS_RELATED_TO field has to be retrieved, add a filter for fetching
      // columns with IS_RELATED_TO column prefix.
       infoFamilyColsFilter.addFilter(
-          TimelineFilterUtils.createHBaseQualifierFilter(
+          HBaseTimelineFilterUtils.createHBaseQualifierFilter(
              CompareOp.EQUAL, ApplicationColumnPrefix.IS_RELATED_TO));
     } else if (isRelatedTo != null && !isRelatedTo.getFilterList().isEmpty()) {
       // Even if fields to retrieve does not contain IS_RELATED_TO, we still
@@ -188,7 +188,7 @@ private FilterList createFilterListForColsOfInfoFamily()
       // isRelatedTo filters are specified. isRelatedTo filters will then be
       // matched after fetching rows from HBase.
       Set<String> isRelatedToCols =
-          TimelineFilterUtils.fetchColumnsFromFilterList(isRelatedTo);
+          HBaseTimelineFilterUtils.fetchColumnsFromFilterList(isRelatedTo);
       infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers(
           ApplicationColumnPrefix.IS_RELATED_TO, isRelatedToCols));
     }
@@ -197,7 +197,7 @@ private FilterList createFilterListForColsOfInfoFamily()
       // If EVENTS field has to be retrieved, add a filter for fetching columns
       // with EVENT column prefix.
       infoFamilyColsFilter.addFilter(
-          TimelineFilterUtils.createHBaseQualifierFilter(
+          HBaseTimelineFilterUtils.createHBaseQualifierFilter(
              CompareOp.EQUAL, ApplicationColumnPrefix.EVENT));
     } else if (eventFilters != null && !eventFilters.getFilterList().isEmpty()){
       // Even if fields to retrieve does not contain EVENTS, we still need to
@@ -205,7 +205,7 @@ private FilterList createFilterListForColsOfInfoFamily()
       // event filters specified. Event filters will then be matched after
       // fetching rows from HBase.
       Set<String> eventCols =
-          TimelineFilterUtils.fetchColumnsFromFilterList(eventFilters);
+          HBaseTimelineFilterUtils.fetchColumnsFromFilterList(eventFilters);
       infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers(
           ApplicationColumnPrefix.EVENT, eventCols));
     }
@@ -224,25 +224,25 @@ private void excludeFieldsFromInfoColFamily(FilterList infoColFamilyList) {
     // Events not required.
     if (!hasField(fieldsToRetrieve, Field.EVENTS)) {
       infoColFamilyList.addFilter(
-          TimelineFilterUtils.createHBaseQualifierFilter(
+          HBaseTimelineFilterUtils.createHBaseQualifierFilter(
              CompareOp.NOT_EQUAL, ApplicationColumnPrefix.EVENT));
     }
     // info not required.
     if (!hasField(fieldsToRetrieve, Field.INFO)) {
       infoColFamilyList.addFilter(
-          TimelineFilterUtils.createHBaseQualifierFilter(
+          HBaseTimelineFilterUtils.createHBaseQualifierFilter(
              CompareOp.NOT_EQUAL, ApplicationColumnPrefix.INFO));
     }
     // is related to not required.
     if (!hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
       infoColFamilyList.addFilter(
-          TimelineFilterUtils.createHBaseQualifierFilter(
+          HBaseTimelineFilterUtils.createHBaseQualifierFilter(
              CompareOp.NOT_EQUAL, ApplicationColumnPrefix.IS_RELATED_TO));
     }
     // relates to not required.
     if (!hasField(fieldsToRetrieve, Field.RELATES_TO)) {
       infoColFamilyList.addFilter(
-          TimelineFilterUtils.createHBaseQualifierFilter(
+          HBaseTimelineFilterUtils.createHBaseQualifierFilter(
              CompareOp.NOT_EQUAL, ApplicationColumnPrefix.RELATES_TO));
     }
   }
@@ -260,7 +260,7 @@ private void updateFilterForConfsAndMetricsToRetrieve(
     // CONFS to fields to retrieve in augmentParams() even if not specified.
     if (dataToRetrieve.getFieldsToRetrieve().contains(Field.CONFIGS)) {
       // Create a filter list for configs.
-      listBasedOnFields.addFilter(TimelineFilterUtils.
+      listBasedOnFields.addFilter(HBaseTimelineFilterUtils.
          createFilterForConfsOrMetricsToRetrieve(
              dataToRetrieve.getConfsToRetrieve(),
              ApplicationColumnFamily.CONFIGS, ApplicationColumnPrefix.CONFIG));
@@ -270,7 +270,7 @@ private void updateFilterForConfsAndMetricsToRetrieve(
     // METRICS to fields to retrieve in augmentParams() even if not specified.
     if (dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS)) {
       // Create a filter list for metrics.
-      listBasedOnFields.addFilter(TimelineFilterUtils.
+      listBasedOnFields.addFilter(HBaseTimelineFilterUtils.
          createFilterForConfsOrMetricsToRetrieve(
              dataToRetrieve.getMetricsToRetrieve(),
              ApplicationColumnFamily.METRICS, ApplicationColumnPrefix.METRIC));
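
Reviewer note (not part of the patch): the created-time handling in constructFilterListBasedOnFilters() above relies on createSingleColValueFiltersByRange() producing a closed interval. A sketch of the expansion; ApplicationColumn.CREATED_TIME is the real column constant, the timestamps are illustrative:

    // Both halves land in a FilterList with the default MUST_PASS_ALL
    // operator, i.e. created_time >= begin AND created_time <= end.
    FilterList createdTimeRange =
        HBaseTimelineFilterUtils.createSingleColValueFiltersByRange(
            ApplicationColumn.CREATED_TIME, 1459900830000L, 1459987230000L);
    // filterIfMissing is set on both SingleColumnValueFilters, so rows that
    // lack a created_time column are excluded rather than passed through.
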
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
index 986a28f..99bacfa 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
@@ -36,11 +36,11 @@
 import org.apache.hadoop.hbase.filter.QualifierFilter;
 import org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
+import org.apache.hadoop.yarn.server.timelineservice.reader.HBaseTimelineFilterUtils;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
-import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix;
@@ -120,14 +120,14 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException {
     Long createdTimeBegin = getFilters().getCreatedTimeBegin();
     Long createdTimeEnd = getFilters().getCreatedTimeEnd();
     if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
-      listBasedOnFilters.addFilter(TimelineFilterUtils
+      listBasedOnFilters.addFilter(HBaseTimelineFilterUtils
          .createSingleColValueFiltersByRange(FlowRunColumn.MIN_START_TIME,
              createdTimeBegin, createdTimeEnd));
     }
     // Filter based on metric filters.
     TimelineFilterList metricFilters = getFilters().getMetricFilters();
     if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
-      listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(
+      listBasedOnFilters.addFilter(HBaseTimelineFilterUtils.createHBaseFilterList(
          FlowRunColumnPrefix.METRIC, metricFilters));
     }
     return listBasedOnFilters;
@@ -180,7 +180,7 @@ protected FilterList constructFilterListBasedOnFields() throws IOException {
     FilterList infoColFamilyList = new FilterList();
     infoColFamilyList.addFilter(infoColumnFamily);
     FilterList columnsList = updateFixedColumns();
-    columnsList.addFilter(TimelineFilterUtils.createHBaseFilterList(
+    columnsList.addFilter(HBaseTimelineFilterUtils.createHBaseFilterList(
        FlowRunColumnPrefix.METRIC, metricsToRetrieve));
     infoColFamilyList.addFilter(columnsList);
     list.addFilter(infoColFamilyList);
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
index 4e1ab8a..6bb35aa 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
@@ -35,11 +35,11 @@
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
+import org.apache.hadoop.yarn.server.timelineservice.reader.HBaseTimelineFilterUtils;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
-import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowColumn;
 import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey;
@@ -107,7 +107,7 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException {
     long createdTimeBegin = filters.getCreatedTimeBegin();
     long createdTimeEnd = filters.getCreatedTimeEnd();
     if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
-      listBasedOnFilters.addFilter(TimelineFilterUtils
+      listBasedOnFilters.addFilter(HBaseTimelineFilterUtils
          .createSingleColValueFiltersByRange(EntityColumn.CREATED_TIME,
              createdTimeBegin, createdTimeEnd));
     }
@@ -115,20 +115,20 @@ protected FilterList constructFilterListBasedOnFilters() throws IOException {
     // listBasedOnFilters.
     TimelineFilterList metricFilters = filters.getMetricFilters();
     if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
-      listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(
+      listBasedOnFilters.addFilter(HBaseTimelineFilterUtils.createHBaseFilterList(
          EntityColumnPrefix.METRIC, metricFilters));
     }
     // Create filter list based on config filters and add it to
     // listBasedOnFilters.
     TimelineFilterList configFilters = filters.getConfigFilters();
     if (configFilters != null && !configFilters.getFilterList().isEmpty()) {
-      listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(
+      listBasedOnFilters.addFilter(HBaseTimelineFilterUtils.createHBaseFilterList(
          EntityColumnPrefix.CONFIG, configFilters));
     }
     // Create filter list based on info filters and add it to listBasedOnFilters
     TimelineFilterList infoFilters = filters.getInfoFilters();
     if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) {
-      listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(
+      listBasedOnFilters.addFilter(HBaseTimelineFilterUtils.createHBaseFilterList(
          EntityColumnPrefix.INFO, infoFilters));
     }
     return listBasedOnFilters;
@@ -251,14 +251,14 @@ private FilterList createFilterListForColsOfInfoFamily() throws IOException {
     // with INFO column prefix.
     if (hasField(fieldsToRetrieve, Field.INFO)) {
       infoFamilyColsFilter
-          .addFilter(TimelineFilterUtils.createHBaseQualifierFilter(
+          .addFilter(HBaseTimelineFilterUtils.createHBaseQualifierFilter(
              CompareOp.EQUAL, EntityColumnPrefix.INFO));
     }
     TimelineFilterList relatesTo = getFilters().getRelatesTo();
     if (hasField(fieldsToRetrieve, Field.RELATES_TO)) {
       // If RELATES_TO field has to be retrieved, add a filter for fetching
       // columns with RELATES_TO column prefix.
-      infoFamilyColsFilter.addFilter(TimelineFilterUtils
+      infoFamilyColsFilter.addFilter(HBaseTimelineFilterUtils
          .createHBaseQualifierFilter(CompareOp.EQUAL,
              EntityColumnPrefix.RELATES_TO));
     } else if (relatesTo != null && !relatesTo.getFilterList().isEmpty()) {
@@ -267,7 +267,7 @@ private FilterList createFilterListForColsOfInfoFamily() throws IOException {
       // relatesTo filters are specified. relatesTo filters will then be
       // matched after fetching rows from HBase.
       Set<String> relatesToCols =
-          TimelineFilterUtils.fetchColumnsFromFilterList(relatesTo);
+          HBaseTimelineFilterUtils.fetchColumnsFromFilterList(relatesTo);
       infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers(
           EntityColumnPrefix.RELATES_TO, relatesToCols));
     }
@@ -275,7 +275,7 @@ private FilterList createFilterListForColsOfInfoFamily() throws IOException {
     if (hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
       // If IS_RELATED_TO field has to be retrieved, add a filter for fetching
       // columns with IS_RELATED_TO column prefix.
-      infoFamilyColsFilter.addFilter(TimelineFilterUtils
+      infoFamilyColsFilter.addFilter(HBaseTimelineFilterUtils
          .createHBaseQualifierFilter(CompareOp.EQUAL,
              EntityColumnPrefix.IS_RELATED_TO));
     } else if (isRelatedTo != null && !isRelatedTo.getFilterList().isEmpty()) {
@@ -284,7 +284,7 @@ private FilterList createFilterListForColsOfInfoFamily() throws IOException {
       // isRelatedTo filters are specified. isRelatedTo filters will then be
       // matched after fetching rows from HBase.
       Set<String> isRelatedToCols =
-          TimelineFilterUtils.fetchColumnsFromFilterList(isRelatedTo);
+          HBaseTimelineFilterUtils.fetchColumnsFromFilterList(isRelatedTo);
       infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers(
           EntityColumnPrefix.IS_RELATED_TO, isRelatedToCols));
     }
@@ -293,7 +293,7 @@ private FilterList createFilterListForColsOfInfoFamily() throws IOException {
       // If EVENTS field has to be retrieved, add a filter for fetching columns
       // with EVENT column prefix.
       infoFamilyColsFilter
-          .addFilter(TimelineFilterUtils.createHBaseQualifierFilter(
+          .addFilter(HBaseTimelineFilterUtils.createHBaseQualifierFilter(
              CompareOp.EQUAL, EntityColumnPrefix.EVENT));
     } else if (eventFilters != null &&
        !eventFilters.getFilterList().isEmpty()) {
@@ -302,7 +302,7 @@ private FilterList createFilterListForColsOfInfoFamily() throws IOException {
       // event filters specified. Event filters will then be matched after
       // fetching rows from HBase.
       Set<String> eventCols =
-          TimelineFilterUtils.fetchColumnsFromFilterList(eventFilters);
+          HBaseTimelineFilterUtils.fetchColumnsFromFilterList(eventFilters);
       infoFamilyColsFilter.addFilter(createFiltersFromColumnQualifiers(
           EntityColumnPrefix.EVENT, eventCols));
     }
@@ -320,25 +320,25 @@ private void excludeFieldsFromInfoColFamily(FilterList infoColFamilyList) {
     EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
     // Events not required.
     if (!hasField(fieldsToRetrieve, Field.EVENTS)) {
-      infoColFamilyList.addFilter(TimelineFilterUtils
+      infoColFamilyList.addFilter(HBaseTimelineFilterUtils
          .createHBaseQualifierFilter(CompareOp.NOT_EQUAL,
              EntityColumnPrefix.EVENT));
     }
     // info not required.
     if (!hasField(fieldsToRetrieve, Field.INFO)) {
-      infoColFamilyList.addFilter(TimelineFilterUtils
+      infoColFamilyList.addFilter(HBaseTimelineFilterUtils
          .createHBaseQualifierFilter(CompareOp.NOT_EQUAL,
              EntityColumnPrefix.INFO));
     }
     // is related to not required.
     if (!hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
-      infoColFamilyList.addFilter(TimelineFilterUtils
+      infoColFamilyList.addFilter(HBaseTimelineFilterUtils
          .createHBaseQualifierFilter(CompareOp.NOT_EQUAL,
              EntityColumnPrefix.IS_RELATED_TO));
     }
     // relates to not required.
     if (!hasField(fieldsToRetrieve, Field.RELATES_TO)) {
-      infoColFamilyList.addFilter(TimelineFilterUtils
+      infoColFamilyList.addFilter(HBaseTimelineFilterUtils
          .createHBaseQualifierFilter(CompareOp.NOT_EQUAL,
              EntityColumnPrefix.RELATES_TO));
     }
@@ -357,7 +357,7 @@ private void updateFilterForConfsAndMetricsToRetrieve(
     // CONFS to fields to retrieve in augmentParams() even if not specified.
     if (dataToRetrieve.getFieldsToRetrieve().contains(Field.CONFIGS)) {
       // Create a filter list for configs.
-      listBasedOnFields.addFilter(TimelineFilterUtils
+      listBasedOnFields.addFilter(HBaseTimelineFilterUtils
          .createFilterForConfsOrMetricsToRetrieve(
              dataToRetrieve.getConfsToRetrieve(), EntityColumnFamily.CONFIGS,
              EntityColumnPrefix.CONFIG));
@@ -367,7 +367,7 @@ private void updateFilterForConfsAndMetricsToRetrieve(
     // METRICS to fields to retrieve in augmentParams() even if not specified.
     if (dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS)) {
       // Create a filter list for metrics.
-      listBasedOnFields.addFilter(TimelineFilterUtils
+      listBasedOnFields.addFilter(HBaseTimelineFilterUtils
          .createFilterForConfsOrMetricsToRetrieve(
              dataToRetrieve.getMetricsToRetrieve(), EntityColumnFamily.METRICS,
              EntityColumnPrefix.METRIC));
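
Reviewer note (not part of the patch): GenericEntityReader, like ApplicationEntityReader above, uses fetchColumnsFromFilterList() to turn post-processing filters into a narrower column projection. A sketch with hypothetical event ids; TimelineExistsFilter and the Operator.OR list are from the real filter API, the ids are made up:

    TimelineFilterList eventFilters = new TimelineFilterList(Operator.OR,
        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "EVENT_1"),
        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "EVENT_2"));
    Set<String> eventCols =
        HBaseTimelineFilterUtils.fetchColumnsFromFilterList(eventFilters);
    // eventCols is {"EVENT_1", "EVENT_2"}; only these EVENT columns are
    // requested from HBase, and the exists filters are re-evaluated in the
    // reader after the rows come back.
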
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/common/TestKeyConverters.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/common/TestKeyConverters.java
deleted file mode 100644
index 58df970..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/common/TestKeyConverters.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.timelineservice.storage.common;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.junit.Test;
-
-public class TestKeyConverters {
-
-  @Test
-  public void testAppIdKeyConverter() {
-    AppIdKeyConverter appIdKeyConverter = new AppIdKeyConverter();
-    long currentTs = System.currentTimeMillis();
-    ApplicationId appId1 = ApplicationId.newInstance(currentTs, 1);
-    ApplicationId appId2 = ApplicationId.newInstance(currentTs, 2);
-    ApplicationId appId3 = ApplicationId.newInstance(currentTs + 300, 1);
-    String appIdStr1 = appId1.toString();
-    String appIdStr2 = appId2.toString();
-    String appIdStr3 = appId3.toString();
-    byte[] appIdBytes1 = appIdKeyConverter.encode(appIdStr1);
-    byte[] appIdBytes2 = appIdKeyConverter.encode(appIdStr2);
-    byte[] appIdBytes3 = appIdKeyConverter.encode(appIdStr3);
-    // App ids' should be encoded in a manner wherein descending order
-    // is maintained.
-    assertTrue(
-        "Ordering of app ids' is incorrect",
-        Bytes.compareTo(appIdBytes1, appIdBytes2) > 0
-            && Bytes.compareTo(appIdBytes1, appIdBytes3) > 0
-            && Bytes.compareTo(appIdBytes2, appIdBytes3) > 0);
-    String decodedAppId1 = appIdKeyConverter.decode(appIdBytes1);
-    String decodedAppId2 = appIdKeyConverter.decode(appIdBytes2);
-    String decodedAppId3 = appIdKeyConverter.decode(appIdBytes3);
-    assertTrue("Decoded app id is not same as the app id encoded",
-        appIdStr1.equals(decodedAppId1));
-    assertTrue("Decoded app id is not same as the app id encoded",
-        appIdStr2.equals(decodedAppId2));
-    assertTrue("Decoded app id is not same as the app id encoded",
-        appIdStr3.equals(decodedAppId3));
-  }
-
-  @Test
-  public void testEventColumnNameConverter() {
-    String eventId = "=foo_=eve=nt=";
-    byte[] valSepBytes = Bytes.toBytes(Separator.VALUES.getValue());
-    byte[] maxByteArr =
-        Bytes.createMaxByteArray(Bytes.SIZEOF_LONG - valSepBytes.length);
-    byte[] ts = Bytes.add(valSepBytes, maxByteArr);
-    Long eventTs = Bytes.toLong(ts);
-    byte[] byteEventColName =
-        new EventColumnName(eventId, eventTs, null).getColumnQualifier();
-    KeyConverter<EventColumnName> eventColumnNameConverter =
-        new EventColumnNameConverter();
-    EventColumnName eventColName =
-        eventColumnNameConverter.decode(byteEventColName);
-    assertEquals(eventId, eventColName.getId());
-    assertEquals(eventTs, eventColName.getTimestamp());
-    assertNull(eventColName.getInfoKey());
-
-    String infoKey = "f=oo_event_in=fo=_key";
-    byteEventColName =
-        new EventColumnName(eventId, eventTs, infoKey).getColumnQualifier();
-    eventColName = eventColumnNameConverter.decode(byteEventColName);
-    assertEquals(eventId, eventColName.getId());
-    assertEquals(eventTs, eventColName.getTimestamp());
-    assertEquals(infoKey, eventColName.getInfoKey());
-  }
-
-  @Test
-  public void testLongKeyConverter() {
-    LongKeyConverter longKeyConverter = new LongKeyConverter();
-    confirmLongKeyConverter(longKeyConverter, Long.MIN_VALUE);
-    confirmLongKeyConverter(longKeyConverter, -1234567890L);
-    confirmLongKeyConverter(longKeyConverter, -128L);
-    confirmLongKeyConverter(longKeyConverter, -127L);
-    confirmLongKeyConverter(longKeyConverter, -1L);
-    confirmLongKeyConverter(longKeyConverter, 0L);
-    confirmLongKeyConverter(longKeyConverter, 1L);
-    confirmLongKeyConverter(longKeyConverter, 127L);
-    confirmLongKeyConverter(longKeyConverter, 128L);
-    confirmLongKeyConverter(longKeyConverter, 1234567890L);
-    confirmLongKeyConverter(longKeyConverter, Long.MAX_VALUE);
-  }
-
-  private void confirmLongKeyConverter(LongKeyConverter longKeyConverter,
-      Long testValue) {
-    Long decoded = longKeyConverter.decode(longKeyConverter.encode(testValue));
-    assertEquals(testValue, decoded);
-  }
-
-  @Test
-  public void testStringKeyConverter() {
-    StringKeyConverter stringKeyConverter = new StringKeyConverter();
-    String phrase = "QuackAttack now!";
-
-    for (int i = 0; i < phrase.length(); i++) {
-      String sub = phrase.substring(i, phrase.length());
-      confirmStrignKeyConverter(stringKeyConverter, sub);
-      confirmStrignKeyConverter(stringKeyConverter, sub + sub);
-    }
-  }
-
-  private void confirmStrignKeyConverter(StringKeyConverter stringKeyConverter,
-      String testValue) {
-    String decoded =
-        stringKeyConverter.decode(stringKeyConverter.encode(testValue));
-    assertEquals(testValue, decoded);
-  }
-
-}
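
Reviewer note (not part of the patch): the ordering assertion in the deleted test above depends on AppIdKeyConverter storing the numeric app id fields inverted, so that newer ids compare smaller and scans return the most recent applications first. A sketch of that property, assuming the converter inverts the cluster timestamp and sequence number against their max values; the timestamp below is illustrative:

    AppIdKeyConverter converter = new AppIdKeyConverter();
    byte[] first = converter.encode(
        ApplicationId.newInstance(1459900830000L, 1).toString());
    byte[] second = converter.encode(
        ApplicationId.newInstance(1459900830000L, 2).toString());
    // Bytes.compareTo(first, second) > 0: the higher sequence number encodes
    // to a byte array that sorts earlier, i.e. descending order is preserved.
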
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/common/TestRowKeys.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/common/TestRowKeys.java
deleted file mode 100644
index 5beb189..0000000
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/common/TestRowKeys.java
+++ /dev/null
@@ -1,246 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.timelineservice.storage.common;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey;
-import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey;
-import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKey;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKey;
-import org.junit.Test;
-
-
-public class TestRowKeys {
-
-  private final static String QUALIFIER_SEP = Separator.QUALIFIERS.getValue();
-  private final static byte[] QUALIFIER_SEP_BYTES = Bytes
-      .toBytes(QUALIFIER_SEP);
-  private final static String CLUSTER = "cl" + QUALIFIER_SEP + "uster";
-  private final static String USER = QUALIFIER_SEP + "user";
-  private final static String FLOW_NAME = "dummy_" + QUALIFIER_SEP + "flow"
-      + QUALIFIER_SEP;
-  private final static Long FLOW_RUN_ID;
-  private final static String APPLICATION_ID;
-  static {
-    long runid = Long.MAX_VALUE - 900L;
-    byte[] longMaxByteArr = Bytes.toBytes(Long.MAX_VALUE);
-    byte[] byteArr = Bytes.toBytes(runid);
-    int sepByteLen = QUALIFIER_SEP_BYTES.length;
-    if (sepByteLen <= byteArr.length) {
-      for (int i = 0; i < sepByteLen; i++) {
-        byteArr[i] = (byte) (longMaxByteArr[i] - QUALIFIER_SEP_BYTES[i]);
-      }
-    }
-    FLOW_RUN_ID = Bytes.toLong(byteArr);
-    long clusterTs = System.currentTimeMillis();
-    byteArr = Bytes.toBytes(clusterTs);
-    if (sepByteLen <= byteArr.length) {
-      for (int i = 0; i < sepByteLen; i++) {
-        byteArr[byteArr.length - sepByteLen + i] =
-            (byte) (longMaxByteArr[byteArr.length - sepByteLen + i] -
-                QUALIFIER_SEP_BYTES[i]);
-      }
-    }
-    clusterTs = Bytes.toLong(byteArr);
-    int seqId = 222;
-    APPLICATION_ID = ApplicationId.newInstance(clusterTs, seqId).toString();
-  }
-
-  private static void verifyRowPrefixBytes(byte[] byteRowKeyPrefix) {
-    int sepLen = QUALIFIER_SEP_BYTES.length;
-    for (int i = 0; i < sepLen; i++) {
-      assertTrue(
-          "Row key prefix not encoded properly.",
-          byteRowKeyPrefix[byteRowKeyPrefix.length - sepLen + i] ==
-              QUALIFIER_SEP_BYTES[i]);
-    }
-  }
-
-  @Test
-  public void testApplicationRowKey() {
-    byte[] byteRowKey =
-        new ApplicationRowKey(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID,
-            APPLICATION_ID).getRowKey();
-    ApplicationRowKey rowKey = ApplicationRowKey.parseRowKey(byteRowKey);
-    assertEquals(CLUSTER, rowKey.getClusterId());
-    assertEquals(USER, rowKey.getUserId());
-    assertEquals(FLOW_NAME, rowKey.getFlowName());
-    assertEquals(FLOW_RUN_ID, rowKey.getFlowRunId());
-    assertEquals(APPLICATION_ID, rowKey.getAppId());
-
-    byte[] byteRowKeyPrefix =
-        new ApplicationRowKeyPrefix(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID)
-            .getRowKeyPrefix();
-    byte[][] splits =
-        Separator.QUALIFIERS.split(byteRowKeyPrefix,
-            new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-                Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                Separator.VARIABLE_SIZE});
-    assertEquals(5, splits.length);
-    assertEquals(0, splits[4].length);
-    assertEquals(FLOW_NAME,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[2])));
-    assertEquals(FLOW_RUN_ID,
-        (Long) LongConverter.invertLong(Bytes.toLong(splits[3])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-
-    byteRowKeyPrefix =
-        new ApplicationRowKeyPrefix(CLUSTER, USER, FLOW_NAME).getRowKeyPrefix();
-    splits =
-        Separator.QUALIFIERS.split(byteRowKeyPrefix, new int[] {
-            Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-            Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE });
-    assertEquals(4, splits.length);
-    assertEquals(0, splits[3].length);
-    assertEquals(FLOW_NAME,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[2])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-  }
-
-  /**
-   * Tests the converters indirectly through the public methods of the
-   * corresponding rowkey.
-   */
-  @Test
-  public void testAppToFlowRowKey() {
-    byte[] byteRowKey = new AppToFlowRowKey(CLUSTER,
-        APPLICATION_ID).getRowKey();
-    AppToFlowRowKey rowKey = AppToFlowRowKey.parseRowKey(byteRowKey);
-    assertEquals(CLUSTER, rowKey.getClusterId());
-    assertEquals(APPLICATION_ID, rowKey.getAppId());
-  }
-
-  @Test
-  public void testEntityRowKey() {
-    String entityId = "!ent!ity!!id!";
-    String entityType = "entity!Type";
-    byte[] byteRowKey =
-        new EntityRowKey(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID, APPLICATION_ID,
-            entityType, entityId).getRowKey();
-    EntityRowKey rowKey = EntityRowKey.parseRowKey(byteRowKey);
-    assertEquals(CLUSTER, rowKey.getClusterId());
-    assertEquals(USER, rowKey.getUserId());
-    assertEquals(FLOW_NAME, rowKey.getFlowName());
-    assertEquals(FLOW_RUN_ID, rowKey.getFlowRunId());
-    assertEquals(APPLICATION_ID, rowKey.getAppId());
-    assertEquals(entityType, rowKey.getEntityType());
-    assertEquals(entityId, rowKey.getEntityId());
-
-    byte[] byteRowKeyPrefix =
-        new EntityRowKeyPrefix(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID,
-            APPLICATION_ID, entityType).getRowKeyPrefix();
-    byte[][] splits =
-        Separator.QUALIFIERS.split(
-            byteRowKeyPrefix,
-            new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-                Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                AppIdKeyConverter.getKeySize(), Separator.VARIABLE_SIZE,
-                Separator.VARIABLE_SIZE});
-    assertEquals(7, splits.length);
-    assertEquals(0, splits[6].length);
-    assertEquals(APPLICATION_ID, new AppIdKeyConverter().decode(splits[4]));
-    assertEquals(entityType,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[5])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-
-    byteRowKeyPrefix =
-        new EntityRowKeyPrefix(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID,
-            APPLICATION_ID).getRowKeyPrefix();
-    splits =
-        Separator.QUALIFIERS.split(
-            byteRowKeyPrefix,
-            new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-                Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                AppIdKeyConverter.getKeySize(), Separator.VARIABLE_SIZE});
-    assertEquals(6, splits.length);
-    assertEquals(0, splits[5].length);
-    AppIdKeyConverter appIdKeyConverter = new AppIdKeyConverter();
-    assertEquals(APPLICATION_ID, appIdKeyConverter.decode(splits[4]));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-  }
-
-  @Test
-  public void testFlowActivityRowKey() {
-    Long ts = 1459900830000L;
-    Long dayTimestamp = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(ts);
-    byte[] byteRowKey =
-        new FlowActivityRowKey(CLUSTER, ts, USER, FLOW_NAME).getRowKey();
-    FlowActivityRowKey rowKey = FlowActivityRowKey.parseRowKey(byteRowKey);
-    assertEquals(CLUSTER, rowKey.getClusterId());
-    assertEquals(dayTimestamp, rowKey.getDayTimestamp());
-    assertEquals(USER, rowKey.getUserId());
-    assertEquals(FLOW_NAME, rowKey.getFlowName());
-
-    byte[] byteRowKeyPrefix =
-        new FlowActivityRowKeyPrefix(CLUSTER).getRowKeyPrefix();
-    byte[][] splits =
-        Separator.QUALIFIERS.split(byteRowKeyPrefix, new int[] {
-            Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE });
-    assertEquals(2, splits.length);
-    assertEquals(0, splits[1].length);
-    assertEquals(CLUSTER,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[0])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-
-    byteRowKeyPrefix =
-        new FlowActivityRowKeyPrefix(CLUSTER, ts).getRowKeyPrefix();
-    splits =
-        Separator.QUALIFIERS.split(byteRowKeyPrefix,
-            new int[] {Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                Separator.VARIABLE_SIZE});
-    assertEquals(3, splits.length);
-    assertEquals(0, splits[2].length);
-    assertEquals(CLUSTER,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[0])));
-    assertEquals(ts,
-        (Long) LongConverter.invertLong(Bytes.toLong(splits[1])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-  }
-
-  @Test
-  public void testFlowRunRowKey() {
-    byte[] byteRowKey =
-        new FlowRunRowKey(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID).getRowKey();
-    FlowRunRowKey rowKey = FlowRunRowKey.parseRowKey(byteRowKey);
-    assertEquals(CLUSTER, rowKey.getClusterId());
-    assertEquals(USER, rowKey.getUserId());
-    assertEquals(FLOW_NAME, rowKey.getFlowName());
-    assertEquals(FLOW_RUN_ID, rowKey.getFlowRunId());
-
-    byte[] byteRowKeyPrefix =
-        new FlowRunRowKey(CLUSTER, USER, FLOW_NAME, null).getRowKey();
-    byte[][] splits =
-        Separator.QUALIFIERS.split(byteRowKeyPrefix, new int[] {
-            Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-            Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE });
-    assertEquals(4, splits.length);
-    assertEquals(0, splits[3].length);
-    assertEquals(FLOW_NAME,
-        Separator.QUALIFIERS.decode(Bytes.toString(splits[2])));
-    verifyRowPrefixBytes(byteRowKeyPrefix);
-  }
-
-}
+ \t ( ) [ ] { } ^ $ \\ \" %"; - - /** - * - */ - @Test - public void testEncodeDecodeString() { - - for (Separator separator : Separator.values()) { - testEncodeDecode(separator, ""); - testEncodeDecode(separator, " "); - testEncodeDecode(separator, "!"); - testEncodeDecode(separator, "?"); - testEncodeDecode(separator, "&"); - testEncodeDecode(separator, "+"); - testEncodeDecode(separator, "\t"); - testEncodeDecode(separator, "Dr."); - testEncodeDecode(separator, "Heinz"); - testEncodeDecode(separator, "Doofenshmirtz"); - testEncodeDecode(separator, villain); - testEncodeDecode(separator, special); - - assertNull(separator.encode(null)); - - } - } - - private void testEncodeDecode(Separator separator, String token) { - String encoded = separator.encode(token); - String decoded = separator.decode(encoded); - String msg = "token:" + token + " separator:" + separator + "."; - assertEquals(msg, token, decoded); - } - - @Test - public void testEncodeDecode() { - testEncodeDecode("Dr.", Separator.QUALIFIERS); - testEncodeDecode("Heinz", Separator.QUALIFIERS, Separator.QUALIFIERS); - testEncodeDecode("Doofenshmirtz", Separator.QUALIFIERS, null, - Separator.QUALIFIERS); - testEncodeDecode("&Perry", Separator.QUALIFIERS, Separator.VALUES, null); - testEncodeDecode("the ", Separator.QUALIFIERS, Separator.SPACE); - testEncodeDecode("Platypus...", (Separator) null); - testEncodeDecode("The what now ?!?", Separator.QUALIFIERS, - Separator.VALUES, Separator.SPACE); - - } - @Test - public void testEncodedValues() { - testEncodeDecode("Double-escape %2$ and %9$ or %%2$ or %%3$, nor %%%2$" + - "= no problem!", - Separator.QUALIFIERS, Separator.VALUES, Separator.SPACE, Separator.TAB); - } - - @Test - public void testSplits() { - byte[] maxLongBytes = Bytes.toBytes(Long.MAX_VALUE); - byte[] maxIntBytes = Bytes.toBytes(Integer.MAX_VALUE); - for (Separator separator : Separator.values()) { - String str1 = "cl" + separator.getValue() + "us"; - String str2 = separator.getValue() + "rst"; - byte[] sepByteArr = Bytes.toBytes(separator.getValue()); - byte[] longVal1Arr = Bytes.add(sepByteArr, Bytes.copy(maxLongBytes, - sepByteArr.length, Bytes.SIZEOF_LONG - sepByteArr.length)); - byte[] intVal1Arr = Bytes.add(sepByteArr, Bytes.copy(maxIntBytes, - sepByteArr.length, Bytes.SIZEOF_INT - sepByteArr.length)); - byte[] arr = separator.join( - Bytes.toBytes(separator.encode(str1)), longVal1Arr, - Bytes.toBytes(separator.encode(str2)), intVal1Arr); - int[] sizes = {Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG, - Separator.VARIABLE_SIZE, Bytes.SIZEOF_INT}; - byte[][] splits = separator.split(arr, sizes); - assertEquals(4, splits.length); - assertEquals(str1, separator.decode(Bytes.toString(splits[0]))); - assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[1])); - assertEquals(str2, separator.decode(Bytes.toString(splits[2]))); - assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[3])); - - longVal1Arr = Bytes.add(Bytes.copy(maxLongBytes, 0, Bytes.SIZEOF_LONG - - sepByteArr.length), sepByteArr); - intVal1Arr = Bytes.add(Bytes.copy(maxIntBytes, 0, Bytes.SIZEOF_INT - - sepByteArr.length), sepByteArr); - arr = separator.join(Bytes.toBytes(separator.encode(str1)), longVal1Arr, - Bytes.toBytes(separator.encode(str2)), intVal1Arr); - splits = separator.split(arr, sizes); - assertEquals(4, splits.length); - assertEquals(str1, separator.decode(Bytes.toString(splits[0]))); - assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[1])); - assertEquals(str2, separator.decode(Bytes.toString(splits[2]))); - 
assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[3])); - - longVal1Arr = Bytes.add(sepByteArr, Bytes.copy(maxLongBytes, - sepByteArr.length, 4 - sepByteArr.length), sepByteArr); - longVal1Arr = Bytes.add(longVal1Arr, Bytes.copy(maxLongBytes, 4, 3 - - sepByteArr.length), sepByteArr); - arr = separator.join(Bytes.toBytes(separator.encode(str1)), longVal1Arr, - Bytes.toBytes(separator.encode(str2)), intVal1Arr); - splits = separator.split(arr, sizes); - assertEquals(4, splits.length); - assertEquals(str1, separator.decode(Bytes.toString(splits[0]))); - assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[1])); - assertEquals(str2, separator.decode(Bytes.toString(splits[2]))); - assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[3])); - - arr = separator.join(Bytes.toBytes(separator.encode(str1)), - Bytes.toBytes(separator.encode(str2)), intVal1Arr, longVal1Arr); - int[] sizes1 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, - Bytes.SIZEOF_INT, Bytes.SIZEOF_LONG}; - splits = separator.split(arr, sizes1); - assertEquals(4, splits.length); - assertEquals(str1, separator.decode(Bytes.toString(splits[0]))); - assertEquals(str2, separator.decode(Bytes.toString(splits[1]))); - assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[2])); - assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[3])); - - try { - int[] sizes2 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, - Bytes.SIZEOF_INT, 7}; - splits = separator.split(arr, sizes2); - fail("Exception should have been thrown."); - } catch (IllegalArgumentException e) {} - - try { - int[] sizes2 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, 2, - Bytes.SIZEOF_LONG}; - splits = separator.split(arr, sizes2); - fail("Exception should have been thrown."); - } catch (IllegalArgumentException e) {} - } - } - - /** - * Simple test to encode and decode using the same separators and confirm that - * we end up with the same as what we started with. - * - * @param token - * @param separators - */ - private static void testEncodeDecode(String token, Separator... 
separators) { - byte[] encoded = Separator.encode(token, separators); - String decoded = Separator.decode(encoded, separators); - assertEquals(token, decoded); - } - - @Test - public void testJoinStripped() { - List<String> stringList = new ArrayList<String>(0); - stringList.add("nothing"); - - String joined = Separator.VALUES.joinEncoded(stringList); - Iterable<String> split = Separator.VALUES.splitEncoded(joined); - assertTrue(Iterables.elementsEqual(stringList, split)); - - stringList = new ArrayList<String>(3); - stringList.add("a"); - stringList.add("b?"); - stringList.add("c"); - - joined = Separator.VALUES.joinEncoded(stringList); - split = Separator.VALUES.splitEncoded(joined); - assertTrue(Iterables.elementsEqual(stringList, split)); - - String[] stringArray1 = {"else"}; - joined = Separator.VALUES.joinEncoded(stringArray1); - split = Separator.VALUES.splitEncoded(joined); - assertTrue(Iterables.elementsEqual(Arrays.asList(stringArray1), split)); - - String[] stringArray2 = {"d", "e?", "f"}; - joined = Separator.VALUES.joinEncoded(stringArray2); - split = Separator.VALUES.splitEncoded(joined); - assertTrue(Iterables.elementsEqual(Arrays.asList(stringArray2), split)); - - List<String> empty = new ArrayList<String>(0); - split = Separator.VALUES.splitEncoded(null); - assertTrue(Iterables.elementsEqual(empty, split)); - - } - -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestKeyConverters.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestKeyConverters.java new file mode 100644 index 0000000..58df970 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestKeyConverters.java @@ -0,0 +1,133 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.junit.Test; + +public class TestKeyConverters { + + @Test + public void testAppIdKeyConverter() { + AppIdKeyConverter appIdKeyConverter = new AppIdKeyConverter(); + long currentTs = System.currentTimeMillis(); + ApplicationId appId1 = ApplicationId.newInstance(currentTs, 1); + ApplicationId appId2 = ApplicationId.newInstance(currentTs, 2); + ApplicationId appId3 = ApplicationId.newInstance(currentTs + 300, 1); + String appIdStr1 = appId1.toString(); + String appIdStr2 = appId2.toString(); + String appIdStr3 = appId3.toString(); + byte[] appIdBytes1 = appIdKeyConverter.encode(appIdStr1); + byte[] appIdBytes2 = appIdKeyConverter.encode(appIdStr2); + byte[] appIdBytes3 = appIdKeyConverter.encode(appIdStr3); + // App ids should be encoded so that their byte representations sort + // in descending order (the most recent application sorts first). + assertTrue( + "Ordering of app ids is incorrect", + Bytes.compareTo(appIdBytes1, appIdBytes2) > 0 + && Bytes.compareTo(appIdBytes1, appIdBytes3) > 0 + && Bytes.compareTo(appIdBytes2, appIdBytes3) > 0); + String decodedAppId1 = appIdKeyConverter.decode(appIdBytes1); + String decodedAppId2 = appIdKeyConverter.decode(appIdBytes2); + String decodedAppId3 = appIdKeyConverter.decode(appIdBytes3); + assertTrue("Decoded app id does not match the encoded app id", + appIdStr1.equals(decodedAppId1)); + assertTrue("Decoded app id does not match the encoded app id", + appIdStr2.equals(decodedAppId2)); + assertTrue("Decoded app id does not match the encoded app id", + appIdStr3.equals(decodedAppId3)); + } + + @Test + public void testEventColumnNameConverter() { + String eventId = "=foo_=eve=nt="; + // Build a timestamp whose big-endian encoding starts with the VALUES + // separator byte, so the fixed-size field itself contains a separator. + byte[] valSepBytes = Bytes.toBytes(Separator.VALUES.getValue()); + byte[] maxByteArr = + Bytes.createMaxByteArray(Bytes.SIZEOF_LONG - valSepBytes.length); + byte[] ts = Bytes.add(valSepBytes, maxByteArr); + Long eventTs = Bytes.toLong(ts); + byte[] byteEventColName = + new EventColumnName(eventId, eventTs, null).getColumnQualifier(); + KeyConverter<EventColumnName> eventColumnNameConverter = + new EventColumnNameConverter(); + EventColumnName eventColName = + eventColumnNameConverter.decode(byteEventColName); + assertEquals(eventId, eventColName.getId()); + assertEquals(eventTs, eventColName.getTimestamp()); + assertNull(eventColName.getInfoKey()); + + String infoKey = "f=oo_event_in=fo=_key"; + byteEventColName = + new EventColumnName(eventId, eventTs, infoKey).getColumnQualifier(); + eventColName = eventColumnNameConverter.decode(byteEventColName); + assertEquals(eventId, eventColName.getId()); + assertEquals(eventTs, eventColName.getTimestamp()); + assertEquals(infoKey, eventColName.getInfoKey()); + } + + @Test + public void testLongKeyConverter() { + LongKeyConverter longKeyConverter = new LongKeyConverter(); + confirmLongKeyConverter(longKeyConverter, Long.MIN_VALUE); + confirmLongKeyConverter(longKeyConverter, -1234567890L); + confirmLongKeyConverter(longKeyConverter, -128L); + confirmLongKeyConverter(longKeyConverter, -127L); + confirmLongKeyConverter(longKeyConverter, -1L); + confirmLongKeyConverter(longKeyConverter, 0L); + confirmLongKeyConverter(longKeyConverter, 1L); + confirmLongKeyConverter(longKeyConverter, 127L); + confirmLongKeyConverter(longKeyConverter, 128L); +
confirmLongKeyConverter(longKeyConverter, 1234567890L); + confirmLongKeyConverter(longKeyConverter, Long.MAX_VALUE); + } + + private void confirmLongKeyConverter(LongKeyConverter longKeyConverter, + Long testValue) { + Long decoded = longKeyConverter.decode(longKeyConverter.encode(testValue)); + assertEquals(testValue, decoded); + } + + @Test + public void testStringKeyConverter() { + StringKeyConverter stringKeyConverter = new StringKeyConverter(); + String phrase = "QuackAttack now!"; + + // Round-trip every suffix of the phrase, and each suffix doubled. + for (int i = 0; i < phrase.length(); i++) { + String sub = phrase.substring(i, phrase.length()); + confirmStringKeyConverter(stringKeyConverter, sub); + confirmStringKeyConverter(stringKeyConverter, sub + sub); + } + } + + private void confirmStringKeyConverter(StringKeyConverter stringKeyConverter, + String testValue) { + String decoded = + stringKeyConverter.decode(stringKeyConverter.encode(testValue)); + assertEquals(testValue, decoded); + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java new file mode 100644 index 0000000..5beb189 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java @@ -0,0 +1,249 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey; +import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKeyPrefix; +import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey; +import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey; +import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKey; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKeyPrefix; +import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKey; +import org.junit.Test; + + +public class TestRowKeys { + + private final static String QUALIFIER_SEP = Separator.QUALIFIERS.getValue(); + private final static byte[] QUALIFIER_SEP_BYTES = Bytes + .toBytes(QUALIFIER_SEP); + private final static String CLUSTER = "cl" + QUALIFIER_SEP + "uster"; + private final static String USER = QUALIFIER_SEP + "user"; + private final static String FLOW_NAME = "dummy_" + QUALIFIER_SEP + "flow" + + QUALIFIER_SEP; + private final static Long FLOW_RUN_ID; + private final static String APPLICATION_ID; + static { + long runid = Long.MAX_VALUE - 900L; + byte[] longMaxByteArr = Bytes.toBytes(Long.MAX_VALUE); + byte[] byteArr = Bytes.toBytes(runid); + int sepByteLen = QUALIFIER_SEP_BYTES.length; + // Blend separator bytes into the run id so that its stored (inverted) + // encoding contains separator bytes inside the fixed-size long field. + if (sepByteLen <= byteArr.length) { + for (int i = 0; i < sepByteLen; i++) { + byteArr[i] = (byte) (longMaxByteArr[i] - QUALIFIER_SEP_BYTES[i]); + } + } + FLOW_RUN_ID = Bytes.toLong(byteArr); + long clusterTs = System.currentTimeMillis(); + byteArr = Bytes.toBytes(clusterTs); + // Likewise end the cluster timestamp's inverted encoding with separators. + if (sepByteLen <= byteArr.length) { + for (int i = 0; i < sepByteLen; i++) { + byteArr[byteArr.length - sepByteLen + i] = + (byte) (longMaxByteArr[byteArr.length - sepByteLen + i] - + QUALIFIER_SEP_BYTES[i]); + } + } + clusterTs = Bytes.toLong(byteArr); + int seqId = 222; + APPLICATION_ID = ApplicationId.newInstance(clusterTs, seqId).toString(); + } + + private static void verifyRowPrefixBytes(byte[] byteRowKeyPrefix) { + int sepLen = QUALIFIER_SEP_BYTES.length; + for (int i = 0; i < sepLen; i++) { + assertTrue( + "Row key prefix not encoded properly.", + byteRowKeyPrefix[byteRowKeyPrefix.length - sepLen + i] == + QUALIFIER_SEP_BYTES[i]); + } + } + + @Test + public void testApplicationRowKey() { + byte[] byteRowKey = + new ApplicationRowKey(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID, + APPLICATION_ID).getRowKey(); + ApplicationRowKey rowKey = ApplicationRowKey.parseRowKey(byteRowKey); + assertEquals(CLUSTER, rowKey.getClusterId()); + assertEquals(USER, rowKey.getUserId()); + assertEquals(FLOW_NAME, rowKey.getFlowName()); + assertEquals(FLOW_RUN_ID, rowKey.getFlowRunId()); + assertEquals(APPLICATION_ID, rowKey.getAppId()); + + byte[] byteRowKeyPrefix = + new ApplicationRowKeyPrefix(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID) + .getRowKeyPrefix(); + byte[][] splits = + Separator.QUALIFIERS.split(byteRowKeyPrefix, + new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, + Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG, + Separator.VARIABLE_SIZE}); + assertEquals(5, splits.length); + assertEquals(0, splits[4].length); + 
assertEquals(FLOW_NAME, + Separator.QUALIFIERS.decode(Bytes.toString(splits[2]))); + assertEquals(FLOW_RUN_ID, + (Long) LongConverter.invertLong(Bytes.toLong(splits[3]))); + verifyRowPrefixBytes(byteRowKeyPrefix); + + byteRowKeyPrefix = + new ApplicationRowKeyPrefix(CLUSTER, USER, FLOW_NAME).getRowKeyPrefix(); + splits = + Separator.QUALIFIERS.split(byteRowKeyPrefix, new int[] { + Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, + Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE }); + assertEquals(4, splits.length); + assertEquals(0, splits[3].length); + assertEquals(FLOW_NAME, + Separator.QUALIFIERS.decode(Bytes.toString(splits[2]))); + verifyRowPrefixBytes(byteRowKeyPrefix); + } + + /** + * Tests the converters indirectly through the public methods of the + * corresponding rowkey. + */ + @Test + public void testAppToFlowRowKey() { + byte[] byteRowKey = new AppToFlowRowKey(CLUSTER, + APPLICATION_ID).getRowKey(); + AppToFlowRowKey rowKey = AppToFlowRowKey.parseRowKey(byteRowKey); + assertEquals(CLUSTER, rowKey.getClusterId()); + assertEquals(APPLICATION_ID, rowKey.getAppId()); + } + + @Test + public void testEntityRowKey() { + String entityId = "!ent!ity!!id!"; + String entityType = "entity!Type"; + byte[] byteRowKey = + new EntityRowKey(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID, APPLICATION_ID, + entityType, entityId).getRowKey(); + EntityRowKey rowKey = EntityRowKey.parseRowKey(byteRowKey); + assertEquals(CLUSTER, rowKey.getClusterId()); + assertEquals(USER, rowKey.getUserId()); + assertEquals(FLOW_NAME, rowKey.getFlowName()); + assertEquals(FLOW_RUN_ID, rowKey.getFlowRunId()); + assertEquals(APPLICATION_ID, rowKey.getAppId()); + assertEquals(entityType, rowKey.getEntityType()); + assertEquals(entityId, rowKey.getEntityId()); + + byte[] byteRowKeyPrefix = + new EntityRowKeyPrefix(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID, + APPLICATION_ID, entityType).getRowKeyPrefix(); + byte[][] splits = + Separator.QUALIFIERS.split( + byteRowKeyPrefix, + new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, + Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG, + AppIdKeyConverter.getKeySize(), Separator.VARIABLE_SIZE, + Separator.VARIABLE_SIZE}); + assertEquals(7, splits.length); + assertEquals(0, splits[6].length); + assertEquals(APPLICATION_ID, new AppIdKeyConverter().decode(splits[4])); + assertEquals(entityType, + Separator.QUALIFIERS.decode(Bytes.toString(splits[5]))); + verifyRowPrefixBytes(byteRowKeyPrefix); + + byteRowKeyPrefix = + new EntityRowKeyPrefix(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID, + APPLICATION_ID).getRowKeyPrefix(); + splits = + Separator.QUALIFIERS.split( + byteRowKeyPrefix, + new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, + Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG, + AppIdKeyConverter.getKeySize(), Separator.VARIABLE_SIZE}); + assertEquals(6, splits.length); + assertEquals(0, splits[5].length); + AppIdKeyConverter appIdKeyConverter = new AppIdKeyConverter(); + assertEquals(APPLICATION_ID, appIdKeyConverter.decode(splits[4])); + verifyRowPrefixBytes(byteRowKeyPrefix); + } + + @Test + public void testFlowActivityRowKey() { + Long ts = 1459900830000L; + Long dayTimestamp = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(ts); + byte[] byteRowKey = + new FlowActivityRowKey(CLUSTER, ts, USER, FLOW_NAME).getRowKey(); + FlowActivityRowKey rowKey = FlowActivityRowKey.parseRowKey(byteRowKey); + assertEquals(CLUSTER, rowKey.getClusterId()); + assertEquals(dayTimestamp, rowKey.getDayTimestamp()); + assertEquals(USER, rowKey.getUserId()); + assertEquals(FLOW_NAME, 
rowKey.getFlowName()); + + byte[] byteRowKeyPrefix = + new FlowActivityRowKeyPrefix(CLUSTER).getRowKeyPrefix(); + byte[][] splits = + Separator.QUALIFIERS.split(byteRowKeyPrefix, new int[] { + Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE }); + assertEquals(2, splits.length); + assertEquals(0, splits[1].length); + assertEquals(CLUSTER, + Separator.QUALIFIERS.decode(Bytes.toString(splits[0]))); + verifyRowPrefixBytes(byteRowKeyPrefix); + + byteRowKeyPrefix = + new FlowActivityRowKeyPrefix(CLUSTER, ts).getRowKeyPrefix(); + splits = + Separator.QUALIFIERS.split(byteRowKeyPrefix, + new int[] {Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG, + Separator.VARIABLE_SIZE}); + assertEquals(3, splits.length); + assertEquals(0, splits[2].length); + assertEquals(CLUSTER, + Separator.QUALIFIERS.decode(Bytes.toString(splits[0]))); + assertEquals(ts, + (Long) LongConverter.invertLong(Bytes.toLong(splits[1]))); + verifyRowPrefixBytes(byteRowKeyPrefix); + } + + @Test + public void testFlowRunRowKey() { + byte[] byteRowKey = + new FlowRunRowKey(CLUSTER, USER, FLOW_NAME, FLOW_RUN_ID).getRowKey(); + FlowRunRowKey rowKey = FlowRunRowKey.parseRowKey(byteRowKey); + assertEquals(CLUSTER, rowKey.getClusterId()); + assertEquals(USER, rowKey.getUserId()); + assertEquals(FLOW_NAME, rowKey.getFlowName()); + assertEquals(FLOW_RUN_ID, rowKey.getFlowRunId()); + + byte[] byteRowKeyPrefix = + new FlowRunRowKey(CLUSTER, USER, FLOW_NAME, null).getRowKey(); + byte[][] splits = + Separator.QUALIFIERS.split(byteRowKeyPrefix, new int[] { + Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, + Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE }); + assertEquals(4, splits.length); + assertEquals(0, splits[3].length); + assertEquals(FLOW_NAME, + Separator.QUALIFIERS.decode(Bytes.toString(splits[2]))); + verifyRowPrefixBytes(byteRowKeyPrefix); + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java new file mode 100644 index 0000000..7d37206 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java @@ -0,0 +1,215 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.hadoop.yarn.server.timelineservice.storage.common; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.Test; + +import com.google.common.collect.Iterables; + +public class TestSeparator { + + private static String villain = "Dr. Heinz Doofenshmirtz"; + private static String special = + ". * | ? + \t ( ) [ ] { } ^ $ \\ \" %"; + + /** + * Round-trips a representative set of tokens through every separator. + */ + @Test + public void testEncodeDecodeString() { + + for (Separator separator : Separator.values()) { + testEncodeDecode(separator, ""); + testEncodeDecode(separator, " "); + testEncodeDecode(separator, "!"); + testEncodeDecode(separator, "?"); + testEncodeDecode(separator, "&"); + testEncodeDecode(separator, "+"); + testEncodeDecode(separator, "\t"); + testEncodeDecode(separator, "Dr."); + testEncodeDecode(separator, "Heinz"); + testEncodeDecode(separator, "Doofenshmirtz"); + testEncodeDecode(separator, villain); + testEncodeDecode(separator, special); + + assertNull(separator.encode(null)); + + } + } + + private void testEncodeDecode(Separator separator, String token) { + String encoded = separator.encode(token); + String decoded = separator.decode(encoded); + String msg = "token:" + token + " separator:" + separator + "."; + assertEquals(msg, token, decoded); + } + + @Test + public void testEncodeDecode() { + testEncodeDecode("Dr.", Separator.QUALIFIERS); + testEncodeDecode("Heinz", Separator.QUALIFIERS, Separator.QUALIFIERS); + testEncodeDecode("Doofenshmirtz", Separator.QUALIFIERS, null, + Separator.QUALIFIERS); + testEncodeDecode("&Perry", Separator.QUALIFIERS, Separator.VALUES, null); + testEncodeDecode("the ", Separator.QUALIFIERS, Separator.SPACE); + testEncodeDecode("Platypus...", (Separator) null); + testEncodeDecode("The what now ?!?", Separator.QUALIFIERS, + Separator.VALUES, Separator.SPACE); + } + + @Test + public void testEncodedValues() { + testEncodeDecode("Double-escape %2$ and %9$ or %%2$ or %%3$, nor %%%2$" + + "= no problem!", + Separator.QUALIFIERS, Separator.VALUES, Separator.SPACE, Separator.TAB); + } + + @Test + public void testSplits() { + byte[] maxLongBytes = Bytes.toBytes(Long.MAX_VALUE); + byte[] maxIntBytes = Bytes.toBytes(Integer.MAX_VALUE); + for (Separator separator : Separator.values()) { + String str1 = "cl" + separator.getValue() + "us"; + String str2 = separator.getValue() + "rst"; + byte[] sepByteArr = Bytes.toBytes(separator.getValue()); + byte[] longVal1Arr = Bytes.add(sepByteArr, Bytes.copy(maxLongBytes, + sepByteArr.length, Bytes.SIZEOF_LONG - sepByteArr.length)); + byte[] intVal1Arr = Bytes.add(sepByteArr, Bytes.copy(maxIntBytes, + sepByteArr.length, Bytes.SIZEOF_INT - sepByteArr.length)); + byte[] arr = separator.join( + Bytes.toBytes(separator.encode(str1)), longVal1Arr, + Bytes.toBytes(separator.encode(str2)), intVal1Arr); + int[] sizes = {Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG, + Separator.VARIABLE_SIZE, Bytes.SIZEOF_INT}; + byte[][] splits = separator.split(arr, sizes); + assertEquals(4, splits.length); + assertEquals(str1, separator.decode(Bytes.toString(splits[0]))); + assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[1])); + assertEquals(str2, separator.decode(Bytes.toString(splits[2]))); + assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[3])); + + longVal1Arr = 
Bytes.add(Bytes.copy(maxLongBytes, 0, Bytes.SIZEOF_LONG - + sepByteArr.length), sepByteArr); + intVal1Arr = Bytes.add(Bytes.copy(maxIntBytes, 0, Bytes.SIZEOF_INT - + sepByteArr.length), sepByteArr); + arr = separator.join(Bytes.toBytes(separator.encode(str1)), longVal1Arr, + Bytes.toBytes(separator.encode(str2)), intVal1Arr); + splits = separator.split(arr, sizes); + assertEquals(4, splits.length); + assertEquals(str1, separator.decode(Bytes.toString(splits[0]))); + assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[1])); + assertEquals(str2, separator.decode(Bytes.toString(splits[2]))); + assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[3])); + + longVal1Arr = Bytes.add(sepByteArr, Bytes.copy(maxLongBytes, + sepByteArr.length, 4 - sepByteArr.length), sepByteArr); + longVal1Arr = Bytes.add(longVal1Arr, Bytes.copy(maxLongBytes, 4, 3 - + sepByteArr.length), sepByteArr); + arr = separator.join(Bytes.toBytes(separator.encode(str1)), longVal1Arr, + Bytes.toBytes(separator.encode(str2)), intVal1Arr); + splits = separator.split(arr, sizes); + assertEquals(4, splits.length); + assertEquals(str1, separator.decode(Bytes.toString(splits[0]))); + assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[1])); + assertEquals(str2, separator.decode(Bytes.toString(splits[2]))); + assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[3])); + + arr = separator.join(Bytes.toBytes(separator.encode(str1)), + Bytes.toBytes(separator.encode(str2)), intVal1Arr, longVal1Arr); + int[] sizes1 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, + Bytes.SIZEOF_INT, Bytes.SIZEOF_LONG}; + splits = separator.split(arr, sizes1); + assertEquals(4, splits.length); + assertEquals(str1, separator.decode(Bytes.toString(splits[0]))); + assertEquals(str2, separator.decode(Bytes.toString(splits[1]))); + assertEquals(Bytes.toInt(intVal1Arr), Bytes.toInt(splits[2])); + assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[3])); + + try { + int[] sizes2 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, + Bytes.SIZEOF_INT, 7}; + splits = separator.split(arr, sizes2); + fail("Exception should have been thrown."); + } catch (IllegalArgumentException e) {} + + try { + int[] sizes2 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, 2, + Bytes.SIZEOF_LONG}; + splits = separator.split(arr, sizes2); + fail("Exception should have been thrown."); + } catch (IllegalArgumentException e) {} + } + } + + /** + * Encodes and decodes the token with the given separators and confirms + * that the decoded result equals the original token. + * + * @param token the string to round-trip. + * @param separators the separators to encode and decode with. + */ + private static void testEncodeDecode(String token, Separator... 
separators) { + byte[] encoded = Separator.encode(token, separators); + String decoded = Separator.decode(encoded, separators); + assertEquals(token, decoded); + } + + @Test + public void testJoinStripped() { + List<String> stringList = new ArrayList<String>(0); + stringList.add("nothing"); + + String joined = Separator.VALUES.joinEncoded(stringList); + Iterable<String> split = Separator.VALUES.splitEncoded(joined); + assertTrue(Iterables.elementsEqual(stringList, split)); + + stringList = new ArrayList<String>(3); + stringList.add("a"); + stringList.add("b?"); + stringList.add("c"); + + joined = Separator.VALUES.joinEncoded(stringList); + split = Separator.VALUES.splitEncoded(joined); + assertTrue(Iterables.elementsEqual(stringList, split)); + + String[] stringArray1 = {"else"}; + joined = Separator.VALUES.joinEncoded(stringArray1); + split = Separator.VALUES.splitEncoded(joined); + assertTrue(Iterables.elementsEqual(Arrays.asList(stringArray1), split)); + + String[] stringArray2 = {"d", "e?", "f"}; + joined = Separator.VALUES.joinEncoded(stringArray2); + split = Separator.VALUES.splitEncoded(joined); + assertTrue(Iterables.elementsEqual(Arrays.asList(stringArray2), split)); + + List<String> empty = new ArrayList<String>(0); + split = Separator.VALUES.splitEncoded(null); + assertTrue(Iterables.elementsEqual(empty, split)); + + } + +}
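Taken together, the moved tests pin down one contract for every KeyConverter: decode must exactly invert encode, and app id keys must sort in descending order. The sketch below restates that contract as a standalone JUnit test; it is illustrative only (the class name RoundTripContractSketch is hypothetical, not part of the patch), but AppIdKeyConverter, ApplicationId, and Bytes are used exactly as they appear in the diff above.

package org.apache.hadoop.yarn.server.timelineservice.storage.common;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.junit.Test;

public class RoundTripContractSketch {
  @Test
  public void appIdRoundTripAndOrdering() {
    AppIdKeyConverter converter = new AppIdKeyConverter();
    String first = ApplicationId.newInstance(1459900830000L, 1).toString();
    String second = ApplicationId.newInstance(1459900830000L, 2).toString();
    byte[] firstBytes = converter.encode(first);
    byte[] secondBytes = converter.encode(second);
    // Decoding must reproduce the original app id exactly.
    assertEquals(first, converter.decode(firstBytes));
    // The later app id encodes to the byte-wise smaller key, so the most
    // recent applications sort first in an ascending HBase scan.
    assertTrue(Bytes.compareTo(firstBytes, secondBytes) > 0);
  }
}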