diff --git hbase-native-client/core/BUCK hbase-native-client/core/BUCK index d8d15a9..ce0c733 100644 --- hbase-native-client/core/BUCK +++ hbase-native-client/core/BUCK @@ -22,6 +22,8 @@ cxx_library( "client.h", "cell.h", "hbase_macros.h", + "filter.h", + "query.h", "keyvalue-codec.h", "region-location.h", "location-cache.h", @@ -78,6 +80,16 @@ cxx_test( deps=[":core",], run_test_separately=True,) cxx_test( + name="filter-test", + srcs=["filter-test.cc",], + deps=[ + ":core", + "//if:if", + "//serde:serde", + "//test-util:test-util", + ], + run_test_separately=True,) +cxx_test( name="get-test", srcs=["get-test.cc",], deps=[":core",], diff --git hbase-native-client/core/filter-test.cc hbase-native-client/core/filter-test.cc new file mode 100644 index 0000000..ff683b6 --- /dev/null +++ hbase-native-client/core/filter-test.cc @@ -0,0 +1,136 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ *
+ */
+
+#include <gtest/gtest.h>
+#include "core/client.h"
+#include "core/configuration.h"
+#include "core/get.h"
+#include "core/result.h"
+#include "core/table.h"
+#include "if/Comparator.pb.h"
+#include "if/HBase.pb.h"
+#include "serde/table-name.h"
+#include "test-util/test-util.h"
+
+using hbase::Configuration;
+using hbase::Get;
+using hbase::FilterFactory;
+using hbase::Table;
+using hbase::TestUtil;
+using hbase::pb::CompareType;
+using hbase::ComparatorFactory;
+using hbase::Comparator;
+
+class FilterTest : public ::testing::Test {
+ protected:
+  static void SetUpTestCase() { test_util_ = std::make_unique<TestUtil>(); }
+
+  // NOTE(review): was test_util_.release(), which leaks the TestUtil and skips
+  // its teardown; reset() destroys it properly.
+  static void TearDownTestCase() { test_util_.reset(); }
+
+  virtual void SetUp() {}
+  virtual void TearDown() {}
+
+  static std::unique_ptr<TestUtil> test_util_;
+};
+
+std::unique_ptr<TestUtil> FilterTest::test_util_ = nullptr;
+
+TEST_F(FilterTest, GetWithColumnPrefixFilter) {
+  // write row1 with 3 columns (column_1, column_2, and foo_column)
+  FilterTest::test_util_->RunShellCmd(
+      "create 't', 'd'; put 't', 'row1', 'd:column_1', 'value1'; put 't', 'row1', 'd:column_2', "
+      "'value2'; put 't', 'row1', 'd:foo_column', 'value3'");
+
+  // Create TableName and Row to be fetched from HBase
+  auto tn = folly::to<hbase::pb::TableName>("t");
+  auto row = "row1";
+
+  // Gets to be performed on above HBase Table
+  Get get_all(row);  // expected to return all 3 columns
+  Get get_one(row);  // expected to return 1 column
+  Get get_two(row);  // expected to return 2 columns
+
+  get_one.SetFilter(FilterFactory::ColumnPrefixFilter("foo_"));
+  get_two.SetFilter(FilterFactory::ColumnPrefixFilter("column_"));
+
+  // Create a client
+  hbase::Client client(Configuration{});
+
+  // Get connection to HBase Table
+  auto table = client.Table(tn);
+  ASSERT_TRUE(table) << "Unable to get connection to Table.";
+
+  // Perform the Get
+  auto result_all = table->Get(get_all);
+  auto result_one = table->Get(get_one);
+  auto result_two = table->Get(get_two);
+
+  table->Close();
+  client.Close();
+
+  // Test the values
+  ASSERT_TRUE(!result_one->IsEmpty()) << "Result shouldn't be empty.";
+  ASSERT_TRUE(!result_two->IsEmpty()) << "Result shouldn't be empty.";
+  ASSERT_TRUE(!result_all->IsEmpty()) << "Result shouldn't be empty.";
+  EXPECT_EQ(row, result_one->Row());
+  EXPECT_EQ(row, result_two->Row());
+  EXPECT_EQ(row, result_all->Row());
+  EXPECT_EQ(1, result_one->Size());
+  EXPECT_EQ(2, result_two->Size());
+  EXPECT_EQ(3, result_all->Size());
+  EXPECT_EQ("value3", *(result_one->Value("d", "foo_column")));
+  EXPECT_EQ("value1", *(result_two->Value("d", "column_1")));
+  EXPECT_EQ("value2", *(result_two->Value("d", "column_2")));
+}
+
+TEST_F(FilterTest, GetWithQualifierFilter) {
+  // write row1 with 3 columns (a,b,c)
+  FilterTest::test_util_->RunShellCmd(
+      "create 't1', 'd'; put 't1', 'row1', 'd:a', 'value1'; put 't1', 'row1', 'd:b', "
+      "'value2'; put 't1', 'row1', 'd:c', 'value3'");
+
+  // Create TableName and Row to be fetched from HBase
+  auto tn = folly::to<hbase::pb::TableName>("t1");
+  auto row = "row1";
+
+  // Gets to be performed on above HBase Table
+  Get get(row);
+  get.SetFilter(FilterFactory::QualifierFilter(CompareType::GREATER_OR_EQUAL,
+                                               *ComparatorFactory::BinaryComparator("b")));
+
+  // Create a client
+  hbase::Client client(Configuration{});
+
+  // Get connection to HBase Table
+  auto table = client.Table(tn);
+  ASSERT_TRUE(table) << "Unable to get connection to Table.";
+
+  // Perform the Get
+  auto result = table->Get(get);
+
+  table->Close();
+  client.Close();
+
+  // Test the values
+  ASSERT_TRUE(!result->IsEmpty()) << "Result shouldn't be empty.";
+  EXPECT_EQ(row, result->Row());
+  EXPECT_EQ(2, result->Size());
+  EXPECT_EQ("value2", *(result->Value("d", "b")));
+  EXPECT_EQ("value3", *(result->Value("d", "c")));
+}
diff --git hbase-native-client/core/filter.h hbase-native-client/core/filter.h
new file mode 100644
index 0000000..b5b7133
--- /dev/null
+++ hbase-native-client/core/filter.h
@@ -0,0 +1,481 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more
contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#pragma once
+
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "if/Comparator.pb.h"
+#include "if/Filter.pb.h"
+#include "if/HBase.pb.h"
+
+using google::protobuf::Message;
+
+namespace hbase {
+
+/**
+ * In C++ Client, Filter is a thin wrapper for calling filters defined as a Java class. The actual
+ * filtering logic is not implemented here, but this class provides a mechanism to call
+ * pre-existing Filter classes (like KeyOnlyFilter, SingleColumnValueFilter, etc) with your Get or
+ * Scan RPCs. This class can also be used to call custom Filters defined as a Java class, or
+ * pre-existing Filters not defined below. Some of the interfaces depends on protobuf classes
+ * defined in HBase.proto, Filter.proto and Comparator.proto.
+ *
+ * Consult the Java class docs for learning about the various filters and how they work (and filter
+ * arguments).
+ *
+ * Pre-existing Filters can be used like this:
+ *
+ * Get get(row);
+ * get.SetFilter(FilterFactory::ColumnPrefixFilter("foo_"));
+ *
+ * Custom filters can be invoked like this:
+ * Get get(row);
+ * std::string filter_java_class_name = "foo.bar.baz";
+ * auto filter_data = std::make_unique<FooBarBazFilterData>();
+ * filter_data->set_foo(foo);
+ * get.SetFilter(std::make_unique<Filter>(filter_java_class_name, filter_data));
+ *
+ */
+class Filter {
+ public:
+  // NOTE(review): mem-initializer order follows declaration order (data_ first);
+  // the original listed java_class_name_ first, triggering -Wreorder.
+  Filter(std::string java_class_name, std::unique_ptr<Message> data)
+      : data_(std::move(data)), java_class_name_(java_class_name) {}
+  virtual ~Filter() {}
+
+  const std::string java_class_name() const { return java_class_name_; }
+
+  const Message& data() const { return *data_; }
+  /**
+   * Serialize the filter data to the given buffer. Does protobuf encoding by default.
+   * Can be overriden if Filter does not use protobuf.
+   */
+  virtual void Serialize(std::string* buf) const {
+    if (data_ != nullptr) {
+      data_->SerializeToString(buf);
+    }
+  }
+
+  /** Internal method */
+  static std::unique_ptr<pb::Filter> ToProto(const Filter& filter) {
+    auto pb_filter = std::make_unique<pb::Filter>();
+    pb_filter->set_name(filter.java_class_name());
+    filter.Serialize(pb_filter->mutable_serialized_filter());
+    return pb_filter;  // NOTE(review): no std::move on return of a local (pessimizing-move)
+  }
+
+ private:
+  std::unique_ptr<Message> data_;
+  std::string java_class_name_;
+};
+
+/**
+ * Comparator for filters. See ByteArrayComparable documentation in Java.
+ */
+class Comparator {
+ public:
+  Comparator(std::string java_class_name, std::unique_ptr<Message> data)
+      : data_(std::move(data)), java_class_name_(java_class_name) {}
+  virtual ~Comparator() {}
+
+  const std::string java_class_name() const { return java_class_name_; }
+
+  /**
+   * Serialize the Comparator data to the given buffer. Does protobuf encoding by default.
+   * Can be overriden if Comparator does not use protobuf.
+   */
+  virtual void Serialize(std::string* buf) const {
+    if (data_ != nullptr) {
+      data_->SerializeToString(buf);
+    }
+  }
+
+  /** Internal method */
+  static std::unique_ptr<pb::Comparator> ToProto(const Comparator& comparator) {
+    auto pb_comparator = std::make_unique<pb::Comparator>();
+    pb_comparator->set_name(comparator.java_class_name());
+    comparator.Serialize(pb_comparator->mutable_serialized_comparator());
+    return pb_comparator;
+  }
+
+ private:
+  std::unique_ptr<Message> data_;
+  std::string java_class_name_;
+};
+
+/**
+ * Used in row range filters
+ */
+struct RowRange {
+  std::string start_row;
+  bool start_row_inclusive;
+  std::string stop_row;
+  bool stop_row_inclusive;
+};
+
+/**
+ * Factory for creating pre-defined filters.
+ */
+class FilterFactory {
+ public:
+  static std::unique_ptr<Filter> ColumnCountGetFilter(uint32_t limit) noexcept {
+    auto data = std::make_unique<pb::ColumnCountGetFilter>();
+    data->set_limit(limit);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.ColumnCountGetFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> ColumnPaginationFilter(uint32_t limit, uint32_t offset) noexcept {
+    auto data = std::make_unique<pb::ColumnPaginationFilter>();
+    data->set_limit(limit);
+    data->set_offset(offset);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.ColumnPaginationFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> ColumnPaginationFilter(uint32_t limit,
+                                                        const std::string& column_offset) noexcept {
+    auto data = std::make_unique<pb::ColumnPaginationFilter>();
+    data->set_limit(limit);
+    data->set_column_offset(column_offset);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.ColumnPaginationFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> ColumnPrefixFilter(const std::string& prefix) noexcept {
+    auto data = std::make_unique<pb::ColumnPrefixFilter>();
+    data->set_prefix(prefix);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.ColumnPrefixFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> ColumnRangeFilter(const std::string& min_column,
+                                                   bool min_column_inclusive,
+                                                   const std::string& max_column,
+                                                   bool max_column_inclusive) noexcept {
+    auto data = std::make_unique<pb::ColumnRangeFilter>();
+    data->set_min_column(min_column);
+    data->set_min_column_inclusive(min_column_inclusive);
+    data->set_max_column(max_column);
+    data->set_max_column_inclusive(max_column_inclusive);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.ColumnRangeFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<pb::CompareFilter> CompareFilter(pb::CompareType compare_op,
+                                                          const Comparator& comparator) noexcept {
+    auto data = std::make_unique<pb::CompareFilter>();
+    data->set_compare_op(compare_op);
+    data->set_allocated_comparator(Comparator::ToProto(comparator).release());
+    return data;
+  }
+
+  /**
+   * Build a dependent column filter with value checking
+   * dependent column varies will be compared using the supplied
+   * compareOp and comparator, for usage of which
+   * refer to {@link CompareFilter}
+   *
+   * @param family dependent column family
+   * @param qualifier dependent column qualifier
+   * @param drop_dependent_column whether the column should be discarded after
+   * @param compare_op comparison op
+   * @param comparator comparator
+   */
+  static std::unique_ptr<Filter> DependentColumnFilter(const std::string& family,
+                                                       const std::string& qualifier,
+                                                       bool drop_dependent_column,
+                                                       pb::CompareType compare_op,
+                                                       const Comparator& comparator) noexcept {
+    auto data = std::make_unique<pb::DependentColumnFilter>();
+    data->set_column_family(family);
+    data->set_column_qualifier(qualifier);
+    data->set_drop_dependent_column(drop_dependent_column);
+    data->set_allocated_compare_filter(CompareFilter(compare_op, comparator).release());
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.DependentColumnFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> FamilyFilter(pb::CompareType compare_op,
+                                              const Comparator& comparator) noexcept {
+    auto data = std::make_unique<pb::FamilyFilter>();
+    data->set_allocated_compare_filter(CompareFilter(compare_op, comparator).release());
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.FamilyFilter", std::move(data));
+  }
+
+  static std::unique_ptr<Filter> FilterAllFilter() noexcept {
+    auto data = std::make_unique<pb::FilterAllFilter>();
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.FilterAllFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> FilterList(
+      pb::FilterList_Operator op, const std::vector<std::unique_ptr<Filter>>& filters) noexcept {
+    auto data = std::make_unique<pb::FilterList>();
+    data->set_operator_(op);
+    for (auto const& f : filters) {
+      data->mutable_filters()->AddAllocated(Filter::ToProto(*f).release());
+    }
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.FilterList", std::move(data));
+  }
+
+  static std::unique_ptr<Filter> FirstKeyOnlyFilter() noexcept {
+    auto data = std::make_unique<pb::FirstKeyOnlyFilter>();
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> FirstKeyValueMatchingQualifiersFilter(
+      const std::set<std::string>& qualifiers) noexcept {
+    auto data = std::make_unique<pb::FirstKeyValueMatchingQualifiersFilter>();
+    for (auto q : qualifiers) {
+      data->add_qualifiers(q);
+    }
+    return std::make_unique<Filter>(
+        "org.apache.hadoop.hbase.filter.FirstKeyValueMatchingQualifiersFilter", std::move(data));
+  }
+
+  static std::unique_ptr<Filter> FuzzyRowFilter(
+      const std::vector<std::pair<std::string, std::string>>& fuzzy_keys_data) noexcept {
+    auto data = std::make_unique<pb::FuzzyRowFilter>();
+    for (auto q : fuzzy_keys_data) {
+      auto p = data->add_fuzzy_keys_data();
+      p->set_first(q.first);
+      p->set_second(q.second);
+    }
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.FuzzyRowFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> InclusiveStopFilter(const std::string& stop_row_key) noexcept {
+    auto data = std::make_unique<pb::InclusiveStopFilter>();
+    data->set_stop_row_key(stop_row_key);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.InclusiveStopFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> KeyOnlyFilter(bool len_as_val) noexcept {
+    auto data = std::make_unique<pb::KeyOnlyFilter>();
+    data->set_len_as_val(len_as_val);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.KeyOnlyFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> MultipleColumnPrefixFilter(
+      const std::vector<std::string>& sorted_prefixes) noexcept {
+    auto data = std::make_unique<pb::MultipleColumnPrefixFilter>();
+    for (auto p : sorted_prefixes) {
+      data->add_sorted_prefixes(p);
+    }
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> MultiRowRangeFilter(
+      const std::vector<RowRange>& row_ranges) noexcept {
+    auto data = std::make_unique<pb::MultiRowRangeFilter>();
+    for (auto r : row_ranges) {
+      auto range = data->add_row_range_list();
+      range->set_start_row(r.start_row);
+      range->set_start_row_inclusive(r.start_row_inclusive);
+      range->set_stop_row(r.stop_row);
+      range->set_stop_row_inclusive(r.stop_row_inclusive);
+    }
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.MultiRowRangeFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> PageFilter(uint64_t page_size) noexcept {
+    auto data = std::make_unique<pb::PageFilter>();
+    data->set_page_size(page_size);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.PageFilter", std::move(data));
+  }
+
+  static std::unique_ptr<Filter> PrefixFilter(const std::string& prefix) noexcept {
+    auto data = std::make_unique<pb::PrefixFilter>();
+    data->set_prefix(prefix);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.PrefixFilter", std::move(data));
+  }
+
+  static std::unique_ptr<Filter> QualifierFilter(pb::CompareType compare_op,
+                                                 const Comparator& comparator) noexcept {
+    auto data = std::make_unique<pb::QualifierFilter>();
+    data->set_allocated_compare_filter(CompareFilter(compare_op, comparator).release());
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.QualifierFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> RandomRowFilter(float chance) noexcept {
+    auto data = std::make_unique<pb::RandomRowFilter>();
+    data->set_chance(chance);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.RandomRowFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> RowFilter(pb::CompareType compare_op,
+                                           const Comparator& comparator) noexcept {
+    auto data = std::make_unique<pb::RowFilter>();
+    data->set_allocated_compare_filter(CompareFilter(compare_op, comparator).release());
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.RowFilter", std::move(data));
+  }
+
+  static std::unique_ptr<Filter> SingleColumnValueExcludeFilter(
+      const std::string& family, const std::string& qualifier, bool filter_if_missing,
+      bool latest_version_only, pb::CompareType compare_op, const Comparator& comparator) noexcept {
+    auto data = std::make_unique<pb::SingleColumnValueExcludeFilter>();
+    auto f = SingleColumnValueFilterProto(family, qualifier, filter_if_missing, latest_version_only,
+                                          compare_op, comparator);
+    data->set_allocated_single_column_value_filter(f.release());
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<pb::SingleColumnValueFilter> SingleColumnValueFilterProto(
+      const std::string& family, const std::string& qualifier, bool filter_if_missing,
+      bool latest_version_only, pb::CompareType compare_op, const Comparator& comparator) noexcept {
+    auto data = std::make_unique<pb::SingleColumnValueFilter>();
+    data->set_column_family(family);
+    data->set_column_qualifier(qualifier);
+    data->set_compare_op(compare_op);
+    data->set_filter_if_missing(filter_if_missing);
+    data->set_latest_version_only(latest_version_only);
+    data->set_allocated_comparator(Comparator::ToProto(comparator).release());
+    return data;
+  }
+
+  static std::unique_ptr<Filter> SingleColumnValueFilter(
+      const std::string& family, const std::string& qualifier, bool filter_if_missing,
+      bool latest_version_only, pb::CompareType compare_op, const Comparator& comparator) noexcept {
+    auto data = SingleColumnValueFilterProto(family, qualifier, filter_if_missing,
+                                             latest_version_only, compare_op, comparator);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.SingleColumnValueFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> SkipFilter(const Filter& filter) noexcept {
+    auto data = std::make_unique<pb::SkipFilter>();
+    data->set_allocated_filter(Filter::ToProto(filter).release());
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.SkipFilter", std::move(data));
+  }
+
+  static std::unique_ptr<Filter> TimestampsFilter(std::vector<int64_t> timestamps,
+                                                  bool can_hint) noexcept {
+    auto data = std::make_unique<pb::TimestampsFilter>();
+    for (auto t : timestamps) {
+      data->add_timestamps(t);
+    }
+    data->set_can_hint(can_hint);
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.TimestampsFilter",
+                                    std::move(data));
+  }
+
+  static std::unique_ptr<Filter> ValueFilter(pb::CompareType compare_op,
+                                             const Comparator& comparator) noexcept {
+    auto data = std::make_unique<pb::ValueFilter>();
+    data->set_allocated_compare_filter(CompareFilter(compare_op, comparator).release());
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.ValueFilter", std::move(data));
+  }
+
+  static std::unique_ptr<Filter> WhileMatchFilter(const Filter& filter) noexcept {
+    auto data = std::make_unique<pb::WhileMatchFilter>();
+    data->set_allocated_filter(Filter::ToProto(filter).release());
+    return std::make_unique<Filter>("org.apache.hadoop.hbase.filter.WhileMatchFilter",
+                                    std::move(data));
+  }
+};
+
+/**
+ * Factory for creating pre-defined Comparators.
+ */
+class ComparatorFactory {
+ public:
+  static std::unique_ptr<pb::ByteArrayComparable> ByteArrayComparable(
+      const std::string& value) noexcept {
+    auto data = std::make_unique<pb::ByteArrayComparable>();
+    data->set_value(value);
+    return data;
+  }
+
+  static std::unique_ptr<Comparator> BinaryComparator(const std::string& value) noexcept {
+    auto data = std::make_unique<pb::BinaryComparator>();
+    data->set_allocated_comparable(ByteArrayComparable(value).release());
+    return std::make_unique<Comparator>("org.apache.hadoop.hbase.filter.BinaryComparator",
+                                        std::move(data));
+  }
+
+  static std::unique_ptr<Comparator> LongComparator(const std::string& value) noexcept {
+    // TODO: this should take a uint64_t argument, not a byte array.
+    auto data = std::make_unique<pb::LongComparator>();
+    data->set_allocated_comparable(ByteArrayComparable(value).release());
+    return std::make_unique<Comparator>("org.apache.hadoop.hbase.filter.LongComparator",
+                                        std::move(data));
+  }
+
+  static std::unique_ptr<Comparator> BinaryPrefixComparator(const std::string& value) noexcept {
+    auto data = std::make_unique<pb::BinaryPrefixComparator>();
+    data->set_allocated_comparable(ByteArrayComparable(value).release());
+    return std::make_unique<Comparator>("org.apache.hadoop.hbase.filter.BinaryPrefixComparator",
+                                        std::move(data));
+  }
+
+  static std::unique_ptr<Comparator> BitComparator(const std::string& value,
+                                                   pb::BitComparator_BitwiseOp bit_op) noexcept {
+    auto data = std::make_unique<pb::BitComparator>();
+    data->set_allocated_comparable(ByteArrayComparable(value).release());
+    data->set_bitwise_op(bit_op);
+    return std::make_unique<Comparator>("org.apache.hadoop.hbase.filter.BitComparator",
+                                        std::move(data));
+  }
+
+  static std::unique_ptr<Comparator> NullComparator() noexcept {
+    auto data = std::make_unique<pb::NullComparator>();
+    return std::make_unique<Comparator>("org.apache.hadoop.hbase.filter.NullComparator",
+                                        std::move(data));
+  }
+
+  /**
+   * @param pattern a valid regular expression
+   * @param pattern_flags java.util.regex.Pattern flags
+   * @param charset the charset name
+   * @param engine engine implementation type, either JAVA or JONI
+   */
+  static std::unique_ptr<Comparator> RegexStringComparator(
+      const std::string& pattern, int32_t pattern_flags, const std::string& charset = "UTF-8",
+      const std::string& engine = "JAVA") noexcept {
+    auto data = std::make_unique<pb::RegexStringComparator>();
+    data->set_pattern(pattern);
+    data->set_pattern_flags(pattern_flags);
+    data->set_charset(charset);
+    data->set_engine(engine);
+    return std::make_unique<Comparator>("org.apache.hadoop.hbase.filter.RegexStringComparator",
+                                        std::move(data));
+  }
+
+  static std::unique_ptr<Comparator> SubstringComparator(const std::string& substr) noexcept {
+    auto data = std::make_unique<pb::SubstringComparator>();
+    data->set_substr(substr);
+    return std::make_unique<Comparator>("org.apache.hadoop.hbase.filter.SubstringComparator",
+                                        std::move(data));
+  }
+};
+}  // namespace hbase
diff --git hbase-native-client/core/get.h hbase-native-client/core/get.h
index f79c633..92d1fee 100644
--- hbase-native-client/core/get.h
+++ hbase-native-client/core/get.h
@@ -24,6 +24,7 @@
 #include <memory>
 #include <string>
 #include <vector>
+#include "core/query.h"
 #include "core/time_range.h"
 #include "if/Client.pb.h"
@@ -35,7 +36,7 @@ namespace hbase {
  */
 using FamilyMap = std::map<std::string, std::vector<std::string>>;
 
-class Get {
+class Get : public Query {
  public:
   /**
    * Constructors
diff --git hbase-native-client/core/query.h hbase-native-client/core/query.h
new file mode 100644
index 0000000..b706303
--- /dev/null
+++ hbase-native-client/core/query.h
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#pragma once
+
+#include "core/filter.h"
+
+namespace hbase {
+
+/**
+ * Base class for read RPC calls (Get / Scan).
+ */
+class Query {
+ public:
+  virtual ~Query() {}
+
+  void SetFilter(std::unique_ptr<Filter> filter) { filter_ = std::move(filter); }
+
+  const std::unique_ptr<Filter>& filter() const { return filter_; }
+
+ protected:
+  std::unique_ptr<Filter> filter_ = nullptr;
+};
+
+}  // namespace hbase
diff --git hbase-native-client/core/request_converter.cc hbase-native-client/core/request_converter.cc
index eba07df..149202e 100644
--- hbase-native-client/core/request_converter.cc
+++ hbase-native-client/core/request_converter.cc
@@ -68,6 +68,10 @@ std::unique_ptr<Request> RequestConverter::ToGetRequest(const Get &get,
     }
   }
 
+  if (get.filter() != nullptr) {
+    pb_get->set_allocated_filter(Filter::ToProto(*(get.filter())).release());
+  }
+
   return pb_req;
 }
 
@@ -108,6 +112,10 @@ std::unique_ptr<Request> RequestConverter::ToScanRequest(const Scan &scan,
     }
   }
 
+  if (scan.filter() != nullptr) {
+    pb_scan->set_allocated_filter(Filter::ToProto(*(scan.filter())).release());
+  }
+
   // TODO We will change this later.
   pb_msg->set_client_handles_partials(false);
   pb_msg->set_client_handles_heartbeats(false);
diff --git hbase-native-client/core/scan.h hbase-native-client/core/scan.h
index e2e7f1a..7e8c7bd 100644
--- hbase-native-client/core/scan.h
+++ hbase-native-client/core/scan.h
@@ -36,7 +36,7 @@ namespace hbase {
  */
 using FamilyMap = std::map<std::string, std::vector<std::string>>;
 
-class Scan {
+class Scan : public Query {
  public:
   /**
    * @brief Constructors. Create a Scan operation across all rows.