Skip to content

Commit

Permalink
HIVE-11407 : JDBC DatabaseMetaData.getTables with large no of tables call leads to HS2 OOM (Sushanth Sowmyan via Thejas Nair)
Browse files Browse the repository at this point in the history
  • Loading branch information
Thejas Nair committed Aug 4, 2015
1 parent c7e1d34 commit 46739a6
Show file tree
Hide file tree
Showing 3 changed files with 118 additions and 3 deletions.
104 changes: 104 additions & 0 deletions ql/src/java/org/apache/hadoop/hive/ql/metadata/TableIterable.java
@@ -0,0 +1,104 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.metadata;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.thrift.TException;

/**
* Use this to get Table objects for a table list. It provides an iterator to
* on the resulting Table objects. It batches the calls to
* IMetaStoreClient.getTableObjectsByName to avoid OOM issues in HS2 (with
* embedded metastore) or MetaStore server (if HS2 is using remote metastore).
*
*/
public class TableIterable implements Iterable<Table> {

  /** Client used to fetch Table objects; may be an embedded or remote metastore client. */
  private final IMetaStoreClient msc;
  /** Database containing all of the tables in {@link #tableNames}. */
  private final String dbname;
  /** Names of the tables to materialize, in iteration order. */
  private final List<String> tableNames;
  /** Maximum number of table names sent to the metastore per getTableObjectsByName call. */
  private final int batchSize;

  /**
   * Primary constructor that fetches all tables in a given msc, given a Hive
   * object, a db name and a table name list.
   *
   * @param msc metastore client used to resolve names into Table objects
   * @param dbname database the tables belong to
   * @param tableNames table names to iterate over
   * @param batchSize max tables fetched per metastore call (bounds HS2/metastore memory)
   * @throws TException declared for API compatibility; not thrown by this constructor
   */
  public TableIterable(IMetaStoreClient msc, String dbname, List<String> tableNames, int batchSize)
      throws TException {
    this.msc = msc;
    this.dbname = dbname;
    this.tableNames = tableNames;
    this.batchSize = batchSize;
  }

  /**
   * Returns an iterator that lazily fetches Table objects in batches of at most
   * {@link #batchSize} names, avoiding one giant getTableObjectsByName call that
   * could OOM HS2 (embedded metastore) or the metastore server (remote metastore).
   */
  @Override
  public Iterator<Table> iterator() {
    return new Iterator<Table>() {

      /** Cursor over the full list of table names; source of the next batch. */
      private final Iterator<String> tableNamesIter = tableNames.iterator();
      /** Iterator over the most recently fetched batch of Table objects; null before first fetch. */
      private Iterator<Table> batchIter = null;

      @Override
      public boolean hasNext() {
        // More elements exist if the current batch is unexhausted or more names remain.
        return ((batchIter != null) && batchIter.hasNext()) || tableNamesIter.hasNext();
      }

      @Override
      public Table next() {
        if ((batchIter == null) || !batchIter.hasNext()) {
          if (!tableNamesIter.hasNext()) {
            // Iterator contract: signal exhaustion instead of issuing a useless
            // metastore call with an empty name batch.
            throw new NoSuchElementException("No more tables to iterate over");
          }
          fetchNextBatch();
        }
        return batchIter.next();
      }

      /**
       * Pulls up to {@link #batchSize} names off tableNamesIter and replaces
       * batchIter with the corresponding Table objects from the metastore.
       */
      private void fetchNextBatch() {
        List<String> nameBatch = new ArrayList<String>(batchSize);
        int count = 0;
        while (count < batchSize && tableNamesIter.hasNext()) {
          nameBatch.add(tableNamesIter.next());
          count++;
        }
        try {
          batchIter = msc.getTableObjectsByName(dbname, nameBatch).iterator();
        } catch (TException e) {
          // Wrap in HiveException for context; rethrow unchecked because
          // Iterator.next() cannot declare checked exceptions.
          throw new RuntimeException(new HiveException(e));
        }
      }

      @Override
      public void remove() {
        // Iterator contract: unsupported mutation must throw
        // UnsupportedOperationException, not IllegalStateException.
        throw new UnsupportedOperationException(
            "TableIterable is a read-only iterable and remove() is unsupported");
      }
    };
  }

}
Expand Up @@ -30,10 +30,12 @@
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.metadata.TableIterable;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.cli.ColumnDescriptor;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
Expand Down Expand Up @@ -153,11 +155,15 @@ public void runInternal() throws HiveSQLException {
authorizeMetaGets(HiveOperationType.GET_COLUMNS, privObjs, cmdStr);
}

int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
for (Entry<String, List<String>> dbTabs : db2Tabs.entrySet()) {
String dbName = dbTabs.getKey();
List<String> tableNames = dbTabs.getValue();
for (Table table : metastoreClient.getTableObjectsByName(dbName, tableNames)) {
TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));

for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {

TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName,
table.getTableName()));
for (ColumnDescriptor column : schema.getColumnDescriptors()) {
if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
continue;
Expand Down
Expand Up @@ -22,11 +22,14 @@
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.metadata.TableIterable;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObjectUtils;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationState;
Expand Down Expand Up @@ -88,9 +91,11 @@ public void runInternal() throws HiveSQLException {
}

String tablePattern = convertIdentifierPattern(tableName, true);
int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);

for (String dbName : metastoreClient.getDatabases(schemaPattern)) {
List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
for (Table table : metastoreClient.getTableObjectsByName(dbName, tableNames)) {
for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {
Object[] rowData = new Object[] {
DEFAULT_HIVE_CATALOG,
table.getDbName(),
Expand Down

0 comments on commit 46739a6

Please sign in to comment.