diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/TableIterable.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/TableIterable.java
new file mode 100644
index 0000000..f3af39b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/TableIterable.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.thrift.TException;
+
+/**
+ * Use this to get Table objects for a table list. It provides an iterator to
+ * on the resulting Table objects. It batches the calls to
+ * IMetaStoreClient.getTableObjectsByName to avoid OOM issues in HS2 (with
+ * embedded metastore) or MetaStore server (if HS2 is using remote metastore).
+ *
+ */
+public class TableIterable implements Iterable<Table>
+{
+
+ @Override
+  public Iterator<Table> iterator() {
+    return new Iterator<Table>() {
+
+      private final Iterator<String> tableNamesIter = tableNames.iterator();
+      private Iterator<Table> batchIter = null;
+
+ @Override
+ public boolean hasNext() {
+ return ((batchIter != null) && batchIter.hasNext()) || tableNamesIter.hasNext();
+ }
+
+ @Override
+ public Table next() {
+ if ((batchIter == null) || !batchIter.hasNext()) {
+ getNextBatch();
+ }
+ return batchIter.next();
+ }
+
+ private void getNextBatch() {
+ // get next batch of table names in this list
+        List<String> nameBatch = new ArrayList<String>();
+ int batch_counter = 0;
+ while (batch_counter < batch_size && tableNamesIter.hasNext()) {
+ nameBatch.add(tableNamesIter.next());
+ batch_counter++;
+ }
+ // get the Table objects for this batch of table names and get iterator
+ // on it
+ try {
+ try {
+ batchIter = msc.getTableObjectsByName(dbname, nameBatch).iterator();
+ } catch (TException e) {
+ throw new HiveException(e);
+ }
+ } catch (HiveException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void remove() {
+ throw new IllegalStateException(
+ "TableIterable is a read-only iterable and remove() is unsupported");
+ }
+ };
+ }
+
+ private final IMetaStoreClient msc;
+ private final String dbname;
+  private final List<String> tableNames;
+ private final int batch_size;
+
+ /**
+ * Primary constructor that fetches all tables in a given msc, given a Hive
+ * object,a db name and a table name list
+ */
+  public TableIterable(IMetaStoreClient msc, String dbname, List<String> tableNames, int batch_size)
+ throws TException {
+ this.msc = msc;
+ this.dbname = dbname;
+ this.tableNames = tableNames;
+ this.batch_size = batch_size;
+ }
+
+}
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
index 92ea7b0..0998cde 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
@@ -23,8 +23,11 @@
import java.util.List;
import java.util.regex.Pattern;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.TableIterable;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.cli.ColumnDescriptor;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
@@ -129,10 +132,11 @@ public void runInternal() throws HiveSQLException {
List<String> dbNames = metastoreClient.getDatabases(schemaPattern);
Collections.sort(dbNames);
+ int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
for (String dbName : dbNames) {
List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
Collections.sort(tableNames);
- for (Table table : metastoreClient.getTableObjectsByName(dbName, tableNames)) {
+ for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {
TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
for (ColumnDescriptor column : schema.getColumnDescriptors()) {
if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
index 823dc4d..4f71928 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
@@ -22,8 +22,11 @@
import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.TableIterable;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationState;
@@ -78,9 +81,11 @@ public void runInternal() throws HiveSQLException {
IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
String schemaPattern = convertSchemaPattern(schemaName);
String tablePattern = convertIdentifierPattern(tableName, true);
+ int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
+
for (String dbName : metastoreClient.getDatabases(schemaPattern)) {
List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
- for (Table table : metastoreClient.getTableObjectsByName(dbName, tableNames)) {
+ for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {
Object[] rowData = new Object[] {
DEFAULT_HIVE_CATALOG,
table.getDbName(),