Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
0d3014c
HBASE-28996: Implement Custom ReplicationEndpoint to Enable WAL Backu…
vinayakphegde Feb 18, 2025
0bff7eb
HBASE-29025: Enhance the full backup command to support Continuous Ba…
vinayakphegde Mar 4, 2025
912ef67
HBASE-29210: Introduce Validation for PITR-Critical Backup Deletion (…
vinayakphegde Apr 10, 2025
0e3b5e4
HBASE-29261: Investigate flaw in backup deletion validation of PITR-c…
vinayakphegde May 20, 2025
c4bef9e
HBASE-29133: Implement "pitr" Command for Point-in-Time Restore (#6717)
vinayakphegde May 30, 2025
716dab8
HBASE-29255: Integrate backup WAL cleanup logic with the delete comma…
vinayakphegde Jun 11, 2025
b54da1b
HBASE-28990 Modify Incremental Backup for Continuous Backup (#6788)
ankitsol Jun 20, 2025
393602d
HBASE-29350: Ensure Cleanup of Continuous Backup WALs After Last Back…
vinayakphegde Jun 23, 2025
1a4c610
HBASE-29219 Ignore Empty WAL Files While Consuming Backed-Up WAL File…
vinayakphegde Jun 24, 2025
1a2ff7b
HBASE-29406: Skip Copying Bulkloaded Files to Backup Location in Cont…
vinayakphegde Jun 27, 2025
27ea7b3
HBASE-29449 Update backup describe command for continuous backup (#7045)
ankitsol Jul 15, 2025
37e195a
HBASE-29445 Add Option to Specify Custom Backup Location in PITR (#7153)
vinayakphegde Jul 16, 2025
aa69616
HBASE-29441 ReplicationSourceShipper should delegate the empty wal en…
vinayakphegde Jul 16, 2025
a4cd71a
HBASE-29459 Capture bulkload files only till IncrCommittedWalTs durin…
ankitsol Jul 22, 2025
3c5c999
HBASE-29310 Handle Bulk Load Operations in Continuous Backup (#7150)
ankitsol Jul 23, 2025
fa6b83f
HBASE-28957 spotless apply after rebase
vinayakphegde Jul 29, 2025
3044b11
HBASE-29375 Add Unit Tests for BackupAdminImpl and Improve Test Granu…
vinayakphegde Jul 29, 2025
176e8c6
HBASE-29519 Copy Bulkloaded Files in Continuous Backup (#7222)
vinayakphegde Aug 20, 2025
5d815b8
HBASE-29524 Handle bulk-loaded HFiles in delete and cleanup process (…
vinayakphegde Aug 26, 2025
29c228a
[HBASE-29520] Utilize Backed-up Bulkloaded Files in Incremental Backu…
ankitsol Sep 8, 2025
9bd36d0
Revert "HBASE-29310 Handle Bulk Load Operations in Continuous Backup …
anmolnar Sep 11, 2025
26f51a0
HBASE-29521: Update Restore Command to Handle Bulkloaded Files (#7300)
vinayakphegde Sep 25, 2025
12e1292
HBASE-29656 Scan WALs to identify bulkload operations for incremental…
ankitsol Oct 27, 2025
6aa212f
HBASE-28957. Build + spotless fix
anmolnar Nov 6, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions bin/hbase
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,7 @@ show_usage() {
echo " version Print the version"
echo " backup Backup tables for recovery"
echo " restore Restore tables from existing backup image"
echo " pitr Restore tables to a specific point in time using backup and WAL replay"
echo " completebulkload Run BulkLoadHFiles tool"
echo " regionsplitter Run RegionSplitter tool"
echo " rowcounter Run RowCounter tool"
Expand Down Expand Up @@ -639,6 +640,22 @@ elif [ "$COMMAND" = "restore" ] ; then
fi
done
fi
# "pitr" = point-in-time restore: restores tables using a backup image plus WAL replay.
elif [ "$COMMAND" = "pitr" ] ; then
CLASS='org.apache.hadoop.hbase.backup.PointInTimeRestoreDriver'
# When running against the shaded client jar, the backup classes and commons-lang3
# are not on the default classpath, so add the first matching jar of each explicitly.
if [ -n "${shaded_jar}" ] ; then
for f in "${HBASE_HOME}"/lib/hbase-backup*.jar; do
if [ -f "${f}" ]; then
CLASSPATH="${CLASSPATH}:${f}"
break
fi
done
for f in "${HBASE_HOME}"/lib/commons-lang3*.jar; do
if [ -f "${f}" ]; then
CLASSPATH="${CLASSPATH}:${f}"
break
fi
done
fi
elif [ "$COMMAND" = "upgrade" ] ; then
echo "This command was used to upgrade to HBase 0.96, it was removed in HBase 2.0.0."
echo "Please follow the documentation at http://hbase.apache.org/book.html#upgrading."
Expand Down
5 changes: 5 additions & 0 deletions hbase-backup/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,11 @@
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-inline</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,232 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.backup;

import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_CHECK;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_CHECK_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_OVERWRITE;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_OVERWRITE_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_RESTORE_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_MAPPING;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_MAPPING_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_RESTORE_DESC;

import java.io.IOException;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.BackupManager;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;

/**
 * Base class for restore command-line tools (the regular restore driver and the point-in-time
 * restore driver). Centralizes option registration, argument parsing and validation that is
 * common to both, then delegates the actual restore work to {@link #executeRestore}.
 * <p>
 * Validation failures are reported on stdout/stderr and surfaced as distinct negative return
 * codes from {@link #doWork()}.
 */
@InterfaceAudience.Private
public abstract class AbstractRestoreDriver extends AbstractHBaseTool {
protected static final Logger LOG = LoggerFactory.getLogger(AbstractRestoreDriver.class);
// Parsed command line; populated in run() before doWork() executes.
protected CommandLine cmd;

// Trailing text appended to the usage/help output; intentionally empty here,
// subclasses can rely on getUsageString() for the header instead.
protected static final String USAGE_FOOTER = "";

protected AbstractRestoreDriver() {
init();
}

// Quiet ZooKeeper/client loggers so tool output stays readable on the console.
protected void init() {
Log4jUtils.disableZkAndClientLoggers();
}

/**
 * Performs the concrete restore operation.
 * @param check if true, only check/verify backup dependencies instead of restoring
 * @param fromTables tables to restore from the backup image(s)
 * @param toTables target tables to restore into, positionally matched to {@code fromTables};
 *          may be null/empty to restore in place
 * @param isOverwrite if true, overwrite existing tables at the restore target
 * @return tool exit code (0 on success)
 */
protected abstract int executeRestore(boolean check, TableName[] fromTables, TableName[] toTables,
boolean isOverwrite);

/**
 * Validates the parsed options and resolves the source/target table lists, then invokes
 * {@link #executeRestore}. Each validation failure returns a distinct negative code:
 * -1 (backup disabled or mutually exclusive/missing options), -2 (backup set lookup failed),
 * -3 (backup set empty or missing), -4 (source/target table count mismatch).
 */
private int parseAndRun() throws IOException {
if (!BackupManager.isBackupEnabled(getConf())) {
System.err.println(BackupRestoreConstants.ENABLE_BACKUP);
return -1;
}

if (cmd.hasOption(OPTION_DEBUG)) {
Log4jUtils.setLogLevel("org.apache.hadoop.hbase.backup", "DEBUG");
}

boolean overwrite = cmd.hasOption(OPTION_OVERWRITE);
if (overwrite) {
LOG.debug("Found overwrite option (-{}) in restore command, "
+ "will overwrite to existing table if any in the restore target", OPTION_OVERWRITE);
}

boolean check = cmd.hasOption(OPTION_CHECK);
if (check) {
LOG.debug(
"Found check option (-{}) in restore command, will check and verify the dependencies",
OPTION_CHECK);
}

// A backup set and an explicit table list are mutually exclusive ways to select tables.
if (cmd.hasOption(OPTION_SET) && cmd.hasOption(OPTION_TABLE)) {
System.err.printf(
"Set name (-%s) and table list (-%s) are mutually exclusive, you can not specify both "
+ "of them.%n",
OPTION_SET, OPTION_TABLE);
printToolUsage();
return -1;
}

// ...but exactly one of them must be given.
if (!cmd.hasOption(OPTION_SET) && !cmd.hasOption(OPTION_TABLE)) {
System.err.printf(
"You have to specify either set name (-%s) or table list (-%s) to " + "restore%n",
OPTION_SET, OPTION_TABLE);
printToolUsage();
return -1;
}

if (cmd.hasOption(OPTION_YARN_QUEUE_NAME)) {
String queueName = cmd.getOptionValue(OPTION_YARN_QUEUE_NAME);
// Set MR job queuename to configuration
getConf().set("mapreduce.job.queuename", queueName);
}

String tables;
TableName[] sTableArray;
TableName[] tTableArray;

String tableMapping = cmd.getOptionValue(OPTION_TABLE_MAPPING);

try (final Connection conn = ConnectionFactory.createConnection(conf)) {
// Check backup set
if (cmd.hasOption(OPTION_SET)) {
String setName = cmd.getOptionValue(OPTION_SET);
try {
tables = getTablesForSet(conn, setName);
} catch (IOException e) {
System.out.println("ERROR: " + e.getMessage() + " for setName=" + setName);
printToolUsage();
return -2;
}
if (tables == null) {
System.out
.println("ERROR: Backup set '" + setName + "' is either empty or does not exist");
printToolUsage();
return -3;
}
} else {
tables = cmd.getOptionValue(OPTION_TABLE);
}

sTableArray = BackupUtils.parseTableNames(tables);
tTableArray = BackupUtils.parseTableNames(tableMapping);

// A table mapping, when given, must pair one target per source table.
if (
sTableArray != null && tTableArray != null && (sTableArray.length != tTableArray.length)
) {
System.out.println("ERROR: table mapping mismatch: " + tables + " : " + tableMapping);
printToolUsage();
return -4;
}
}

return executeRestore(check, sTableArray, tTableArray, overwrite);
}

@Override
protected void addOptions() {
addOptNoArg(OPTION_OVERWRITE, OPTION_OVERWRITE_DESC);
addOptNoArg(OPTION_CHECK, OPTION_CHECK_DESC);
addOptNoArg(OPTION_DEBUG, OPTION_DEBUG_DESC);
addOptWithArg(OPTION_SET, OPTION_SET_RESTORE_DESC);
addOptWithArg(OPTION_TABLE, OPTION_TABLE_LIST_DESC);
addOptWithArg(OPTION_TABLE_MAPPING, OPTION_TABLE_MAPPING_DESC);
addOptWithArg(OPTION_YARN_QUEUE_NAME, OPTION_YARN_QUEUE_NAME_RESTORE_DESC);
}

@Override
protected void processOptions(CommandLine cmd) {
this.cmd = cmd;
}

@Override
protected int doWork() throws Exception {
return parseAndRun();
}

/**
 * Parses arguments and runs the tool. Overrides the AbstractHBaseTool flow so that
 * parse errors and help requests print this tool's own usage text.
 * NOTE(review): help (-h) returns EXIT_FAILURE rather than success — confirm this is
 * the intended convention (it matches the existing restore driver behavior).
 */
@Override
public int run(String[] args) {
Objects.requireNonNull(conf, "Tool configuration is not initialized");

try {
cmd = parseArgs(args);
} catch (Exception e) {
System.out.println("Error parsing command-line arguments: " + e.getMessage());
printToolUsage();
return EXIT_FAILURE;
}

if (cmd.hasOption(SHORT_HELP_OPTION) || cmd.hasOption(LONG_HELP_OPTION)) {
printToolUsage();
return EXIT_FAILURE;
}

processOptions(cmd);

try {
return doWork();
} catch (Exception e) {
LOG.error("Error running restore tool", e);
return EXIT_FAILURE;
}
}

// Prints the subclass-supplied usage header followed by the formatted option list.
protected void printToolUsage() {
System.out.println(getUsageString());
HelpFormatter helpFormatter = new HelpFormatter();
helpFormatter.setLeftPadding(2);
helpFormatter.setDescPadding(8);
helpFormatter.setWidth(100);
helpFormatter.setSyntaxPrefix("Options:");
helpFormatter.printHelp(" ", null, options, USAGE_FOOTER);
System.out.println(BackupRestoreConstants.VERIFY_BACKUP);
}

/** Returns the usage header line(s) for this tool, printed above the option list. */
protected abstract String getUsageString();

/**
 * Resolves a backup set name to its comma-delimited table list via the backup system table.
 * Returns null when the set does not exist or has no tables.
 */
private String getTablesForSet(Connection conn, String name) throws IOException {
try (final BackupSystemTable table = new BackupSystemTable(conn)) {
List<TableName> tables = table.describeBackupSet(name);

if (tables == null) {
return null;
}

return StringUtils.join(tables, BackupRestoreConstants.TABLENAME_DELIMITER_IN_COMMAND);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,13 @@ public interface BackupAdmin extends Closeable {
*/
void restore(RestoreRequest request) throws IOException;

/**
 * Restore the given tables to a specific point in time, using a backup image together with
 * replay of backed-up WALs (see the {@code pitr} command).
 * @param request point-in-time restore request (tables, target timestamp, options)
 * @throws IOException if the restore fails or the request cannot be satisfied
 */
void pointInTimeRestore(PointInTimeRestoreRequest request) throws IOException;

/**
* Describe backup image command
* @param backupId backup id
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,17 @@
*/
package org.apache.hadoop.hbase.backup;

import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.LONG_OPTION_ENABLE_CONTINUOUS_BACKUP;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.LONG_OPTION_FORCE_DELETE;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_ENABLE_CONTINUOUS_BACKUP;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_ENABLE_CONTINUOUS_BACKUP_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_FORCE_DELETE;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_FORCE_DELETE_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_IGNORECHECKSUM;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_IGNORECHECKSUM_DESC;
import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_KEEP;
Expand Down Expand Up @@ -159,7 +165,9 @@ protected void addOptions() {
addOptWithArg(OPTION_PATH, OPTION_PATH_DESC);
addOptWithArg(OPTION_KEEP, OPTION_KEEP_DESC);
addOptWithArg(OPTION_YARN_QUEUE_NAME, OPTION_YARN_QUEUE_NAME_DESC);

addOptNoArg(OPTION_ENABLE_CONTINUOUS_BACKUP, LONG_OPTION_ENABLE_CONTINUOUS_BACKUP,
OPTION_ENABLE_CONTINUOUS_BACKUP_DESC);
addOptNoArg(OPTION_FORCE_DELETE, LONG_OPTION_FORCE_DELETE, OPTION_FORCE_DELETE_DESC);
}

@Override
Expand Down
Loading