Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -3117,6 +3117,13 @@ public ClusterMetrics getClusterMetricsWithoutCoprocessor(EnumSet<Option> option
Map<String, TableDescriptor> tableDescriptorMap = getTableDescriptors().getAll();
for (TableDescriptor tableDescriptor : tableDescriptorMap.values()) {
TableName tableName = tableDescriptor.getTableName();
if (
tableName.isSystemTable() && tableName.getQualifierAsString().startsWith("meta")
&& !tableName.equals(TableName.META_TABLE_NAME)
) {
LOG.info("Skipping foreign meta table {} in cluster metrics", tableName);
continue;
}
RegionStatesCount regionStatesCount =
assignmentManager.getRegionStatesCount(tableName);
tableRegionStatesCountMap.put(tableName, regionStatesCount);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -261,7 +261,8 @@ private void loadRegionsFromFS(final HashSet<String> mergedParentRegions, final
FileSystem fs = master.getMasterFileSystem().getFileSystem();

int numRegions = 0;
List<Path> tableDirs = FSUtils.getTableDirs(fs, rootDir);
List<Path> tableDirs =
FSUtils.getTableDirs(fs, rootDir).stream().filter(FSUtils::isLocalMetaTable).toList();
for (Path tableDir : tableDirs) {
List<Path> regionDirs = FSUtils.getRegionDirs(fs, tableDir);
for (Path regionDir : regionDirs) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -270,7 +270,8 @@ public Map<String, TableDescriptor> getAll() throws IOException {
LOG.info("Fetching table descriptors from the filesystem.");
final long startTime = EnvironmentEdgeManager.currentTime();
AtomicBoolean allvisited = new AtomicBoolean(usecache);
List<Path> tableDirs = FSUtils.getTableDirs(fs, rootdir);
List<Path> tableDirs =
FSUtils.getTableDirs(fs, rootdir).stream().filter(FSUtils::isLocalMetaTable).toList();
if (!tableDescriptorParallelLoadEnable) {
for (Path dir : tableDirs) {
internalGet(dir, tds, allvisited);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1046,6 +1046,25 @@ public static List<Path> getLocalTableDirs(final FileSystem fs, final Path rootd
return tabledirs;
}

/**
 * A filter to exclude meta tables belonging to foreign clusters. This is essential in a
 * read-replica setup where multiple clusters share the same fs.
 * <p>
 * Only table directories inside the meta table's own namespace (i.e. the system namespace)
 * are candidates for exclusion; a user table in another namespace whose qualifier happens to
 * start with "meta" (e.g. {@code default:meta_backup}) is a regular table and must be kept.
 * @param tablePath The Path to the table directory, expected to be of the form
 *          {@code <root>/data/<namespace>/<table>}.
 * @return {@code true} if the path is a regular table or the cluster's own meta table.
 *         {@code false} if it is a meta table belonging to a different cluster.
 */
public static boolean isLocalMetaTable(Path tablePath) {
  if (tablePath == null) {
    return false;
  }
  String dirName = tablePath.getName();
  // Fast path: a name that does not start with "meta" can never be a foreign meta table.
  if (!dirName.startsWith(TableName.META_TABLE_NAME.getQualifierAsString())) {
    return true;
  }
  // Apply the foreign-meta filter only within the meta table's namespace. Without this
  // check, any table named "meta*" in any namespace would be silently dropped.
  // NOTE(review): assumes the parent directory is the namespace directory, which holds for
  // paths produced by FSUtils.getTableDirs — confirm for any other callers.
  Path namespaceDir = tablePath.getParent();
  if (
    namespaceDir != null
      && !namespaceDir.getName().equals(TableName.META_TABLE_NAME.getNamespaceAsString())
  ) {
    return true;
  }
  return TableName.valueOf(TableName.META_TABLE_NAME.getNamespaceAsString(), dirName)
    .equals(TableName.META_TABLE_NAME);
}

/**
* Filter for all dirs that don't start with '.'
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,18 +17,29 @@
*/
package org.apache.hadoop.hbase.master;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.ServerMetricsBuilder;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.StartTestingClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.YouAreDeadException;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionStatesCount;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
Expand Down Expand Up @@ -192,4 +203,26 @@ public void testDefaultMasterProcMetrics() throws Exception {
MetricsMasterProcSource masterSource = master.getMasterMetrics().getMetricsProcSource();
metricsHelper.assertGauge("numMasterWALs", master.getNumWALFiles(), masterSource);
}

@Test
public void testClusterMetricsMetaTableSkipping() throws Exception {
  // A meta-named table in the system namespace that is not the cluster's own hbase:meta
  // stands in for a foreign cluster's meta table on shared storage.
  final TableName foreignMeta = TableName.valueOf("hbase", "meta_replica");
  final TableDescriptor foreignMetaDescriptor = TableDescriptorBuilder.newBuilder(foreignMeta)
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("info")).build();
  master.getTableDescriptors().update(foreignMetaDescriptor, true);
  try {
    final ClusterMetrics clusterMetrics = master.getClusterMetricsWithoutCoprocessor(
      EnumSet.of(ClusterMetrics.Option.TABLE_TO_REGIONS_COUNT));
    final Map<TableName, RegionStatesCount> countsByTable =
      clusterMetrics.getTableRegionStatesCount();

    // The foreign meta table must be filtered out, while the cluster's own meta survives.
    assertFalse("Foreign meta table should not be present",
      countsByTable.containsKey(foreignMeta));
    assertTrue("Local meta should be present",
      countsByTable.containsKey(TableName.META_TABLE_NAME));
  } finally {
    // Remove the fake descriptor so other tests in this class are unaffected.
    master.getTableDescriptors().remove(foreignMeta);
  }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,20 +31,28 @@
import java.util.Optional;
import java.util.concurrent.Future;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.master.TableStateManager;
import org.apache.hadoop.hbase.master.hbck.HbckChore;
import org.apache.hadoop.hbase.master.hbck.HbckReport;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.junit.Before;
Expand Down Expand Up @@ -247,4 +255,39 @@ public void testChoreDisable() {
hbckChoreWithChangedConf.choreForTesting();
assertNull(hbckChoreWithChangedConf.getLastReport());
}

@Test
public void testChoreSkipsForeignMetaTables() throws Exception {
  final FileSystem fs = master.getMasterFileSystem().getFileSystem();
  final Path rootDir = master.getMasterFileSystem().getRootDir();
  final Path hbaseNamespaceDir = new Path(rootDir, HConstants.BASE_NAMESPACE_DIR + "/hbase");
  fs.mkdirs(hbaseNamespaceDir);

  // Lay down on-disk layouts for both a foreign meta table and the local meta table:
  // a table dir containing a .tabledesc dir, a fake region dir, and a table descriptor.
  for (String qualifier : new String[] { "meta_replica1", "meta" }) {
    TableName tableName = TableName.valueOf("hbase", qualifier);
    Path metaTableDir = new Path(hbaseNamespaceDir, qualifier);
    fs.mkdirs(metaTableDir);
    fs.mkdirs(new Path(metaTableDir, FSTableDescriptors.TABLEINFO_DIR));
    fs.mkdirs(new Path(metaTableDir, "abcdef0123456789"));

    TableDescriptor descriptor = TableDescriptorBuilder.newBuilder(tableName)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(HConstants.CATALOG_FAMILY)
        .setMaxVersions(HConstants.DEFAULT_HBASE_META_VERSIONS).setInMemory(true)
        .setBlocksize(HConstants.DEFAULT_HBASE_META_BLOCK_SIZE)
        .setBloomFilterType(BloomType.ROWCOL).build())
      .build();
    FSTableDescriptors.createTableDescriptorForTableDirectory(fs,
      CommonFSUtils.getTableDir(rootDir, tableName), descriptor, false);
  }

  // The chore must complete and must not flag the foreign meta's regions as orphans.
  assertTrue("HbckChore should run successfully", hbckChore.runChore());
  HbckReport report = hbckChore.getLastReport();
  assertNotNull("HbckReport should not be null", report);
  boolean foreignMetaReported = report.getOrphanRegionsOnFS().values().stream()
    .anyMatch(p -> p.toString().contains("meta_replica1"));
  assertFalse("HbckChore should not report foreign meta tables as orphans", foreignMetaReported);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.junit.AfterClass;
Expand Down Expand Up @@ -483,6 +484,38 @@ public void testIgnoreBrokenTableDescriptorFiles() throws IOException {
assertFalse(fs.exists(brokenFile));
}

@Test
public void testFSTableDescriptorsSkipsForeignMetaTables() throws Exception {
  final FileSystem fs = FileSystem.get(UTIL.getConfiguration());
  final Path hbaseNamespaceDir = new Path(testDir, HConstants.BASE_NAMESPACE_DIR + "/hbase");
  fs.mkdirs(hbaseNamespaceDir);

  // Create on-disk layouts for a foreign meta table and the local meta table:
  // table dir with a .tabledesc dir, a fake region dir, and a written table descriptor.
  for (String qualifier : new String[] { "meta_replica1", "meta" }) {
    TableName tableName = TableName.valueOf("hbase", qualifier);
    Path metaTableDir = new Path(hbaseNamespaceDir, qualifier);
    fs.mkdirs(metaTableDir);
    fs.mkdirs(new Path(metaTableDir, FSTableDescriptors.TABLEINFO_DIR));
    fs.mkdirs(new Path(metaTableDir, "abcdef0123456789"));

    TableDescriptor descriptor = TableDescriptorBuilder.newBuilder(tableName)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(HConstants.CATALOG_FAMILY)
        .setMaxVersions(HConstants.DEFAULT_HBASE_META_VERSIONS).setInMemory(true)
        .setBlocksize(HConstants.DEFAULT_HBASE_META_BLOCK_SIZE)
        .setBloomFilterType(BloomType.ROWCOL).build())
      .build();
    FSTableDescriptors.createTableDescriptorForTableDirectory(fs,
      CommonFSUtils.getTableDir(testDir, tableName), descriptor, false);
  }

  // getAll() must skip the foreign meta table while still returning the local one.
  Map<String, TableDescriptor> allTables = new FSTableDescriptors(fs, testDir).getAll();
  assertFalse("Should not contain meta_replica1", allTables.containsKey("hbase:meta_replica1"));
  assertTrue("Should include the local hbase:meta", allTables.containsKey("hbase:meta"));
}

private static class FSTableDescriptorsTest extends FSTableDescriptors {

public FSTableDescriptorsTest(FileSystem fs, Path rootdir) {
Expand Down