From 18b2756ae0aab554ed99af76da1ca91d0da46c1d Mon Sep 17 00:00:00 2001
From: Jarryd Lee
Date: Mon, 9 Jan 2023 14:05:29 -0800
Subject: [PATCH] Pass filesystem instead of conf values. Rename target to
 restore

---
 .../apache/hadoop/hbase/backup/BackupInfo.java     | 10 +++++-----
 .../hadoop/hbase/backup/BackupRequest.java         | 14 +++++++-------
 .../hadoop/hbase/backup/BackupTableInfo.java       |  2 +-
 .../apache/hadoop/hbase/backup/RestoreJob.java     |  6 ++++--
 .../hadoop/hbase/backup/RestoreRequest.java        | 14 +++++++-------
 .../hbase/backup/impl/BackupAdminImpl.java         |  8 ++++----
 .../hbase/backup/impl/BackupCommands.java          |  2 +-
 .../hbase/backup/impl/BackupManager.java           | 18 +++++++++---------
 .../hbase/backup/impl/RestoreTablesClient.java     | 13 ++++++++-----
 .../hbase/backup/impl/TableBackupClient.java       |  4 ++--
 .../backup/mapreduce/MapReduceBackupMergeJob.java  |  2 +-
 .../backup/mapreduce/MapReduceHFileSplitterJob.java |  1 -
 .../backup/mapreduce/MapReduceRestoreJob.java      |  9 +++++----
 .../hadoop/hbase/backup/util/BackupUtils.java      | 18 +++++-------------
 .../hadoop/hbase/backup/util/RestoreTool.java      | 13 +++++++------
 .../hadoop/hbase/backup/TestBackupBase.java        |  2 +-
 .../hadoop/hbase/backup/TestBackupUtils.java       |  2 +-
 ...TestIncrementalBackupMergeWithFailures.java     |  2 +-
 .../hadoop/hbase/backup/TestRemoteRestore.java     |  6 +++---
 .../hbase/IntegrationTestBackupRestore.java        |  4 ++--
 20 files changed, 74 insertions(+), 76 deletions(-)

diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java
index fdad0d549830..01851cabc726 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java
@@ -168,14 +168,14 @@ public BackupInfo() {
     backupTableInfoMap = new HashMap<>();
   }
 
-  public BackupInfo(String backupId, BackupType type, TableName[] tables, String targetRootDir) {
+  public BackupInfo(String backupId, BackupType type, TableName[] tables, String restoreRootDir) {
     this();
     this.backupId = backupId;
     this.type = type;
-    this.backupRootDir = targetRootDir;
+    this.backupRootDir = restoreRootDir;
     this.addTables(tables);
     if (type == BackupType.INCREMENTAL) {
-      setHLogTargetDir(BackupUtils.getLogBackupDir(targetRootDir, backupId));
+      setHLogTargetDir(BackupUtils.getLogBackupDir(restoreRootDir, backupId));
     }
     this.startTs = 0;
     this.completeTs = 0;
@@ -213,8 +213,8 @@ public void setType(BackupType type) {
     this.type = type;
   }
 
-  public void setBackupRootDir(String targetRootDir) {
-    this.backupRootDir = targetRootDir;
+  public void setBackupRootDir(String restoreRootDir) {
+    this.backupRootDir = restoreRootDir;
   }
 
   public void setTotalBytesCopied(long totalBytesCopied) {
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupRequest.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupRequest.java
index c9c7a5b61810..ef0c2fbaa666 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupRequest.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupRequest.java
@@ -45,8 +45,8 @@ public Builder withTableList(List<TableName> tables) {
       return this;
     }
 
-    public Builder withTargetRootDir(String backupDir) {
-      request.setTargetRootDir(backupDir);
+    public Builder withRestoreRootDir(String backupDir) {
+      request.setRestoreRootDir(backupDir);
       return this;
     }
 
@@ -78,7 +78,7 @@ public BackupRequest build() {
 
   private BackupType type;
   private List<TableName> tableList;
-  private String targetRootDir;
+  private String restoreRootDir;
   private int totalTasks = -1;
   private long bandwidth = -1L;
   private String backupSetName;
@@ -105,13 +105,13 @@ public List<TableName> getTableList() {
     return this.tableList;
   }
 
-  private BackupRequest setTargetRootDir(String targetRootDir) {
-    this.targetRootDir = targetRootDir;
+  private BackupRequest setRestoreRootDir(String restoreRootDir) {
+    this.restoreRootDir = restoreRootDir;
     return this;
   }
 
-  public String getTargetRootDir() {
-    return this.targetRootDir;
+  public String getRestoreRootDir() {
+    return this.restoreRootDir;
   }
 
   private BackupRequest setTotalTasks(int totalTasks) {
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupTableInfo.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupTableInfo.java
index 01097422e3a1..c6032b41d42c 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupTableInfo.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupTableInfo.java
@@ -43,7 +43,7 @@ public class BackupTableInfo {
   public BackupTableInfo() {
   }
 
-  public BackupTableInfo(TableName table, String targetRootDir, String backupId) {
+  public BackupTableInfo(TableName table, String restoreRootDir, String backupId) {
     this.table = table;
   }
 
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreJob.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreJob.java
index b014e6693bbc..3e0d216ed0df 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreJob.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreJob.java
@@ -19,6 +19,7 @@
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -34,10 +35,11 @@ public interface RestoreJob extends Configurable {
    * Run restore operation
    * @param dirPaths          path array of WAL log directories
    * @param fromTables        from tables
+   * @param restoreFileSystem output file system
    * @param toTables          to tables
    * @param fullBackupRestore full backup restore
    * @throws IOException if running the job fails
    */
-  void run(Path[] dirPaths, TableName[] fromTables, TableName[] toTables, boolean fullBackupRestore)
-    throws IOException;
+  void run(Path[] dirPaths, TableName[] fromTables, FileSystem restoreFileSystem,
+    TableName[] toTables, boolean fullBackupRestore) throws IOException;
 }
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreRequest.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreRequest.java
index 4e097188fe7f..f7f1d848d958 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreRequest.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreRequest.java
@@ -37,8 +37,8 @@ public Builder withBackupRootDir(String backupRootDir) {
       return this;
     }
 
-    public Builder withTargetRootDir(String targetRootDir) {
-      request.setTargetRootDir(targetRootDir);
+    public Builder withRestoreRootDir(String restoreRootDir) {
+      request.setRestoreRootDir(restoreRootDir);
       return this;
     }
 
@@ -73,7 +73,7 @@ public RestoreRequest build() {
   }
 
   private String backupRootDir;
-  private String targetRootDir;
+  private String restoreRootDir;
   private String backupId;
   private boolean check = false;
   private TableName[] fromTables;
@@ -92,12 +92,12 @@ private RestoreRequest setBackupRootDir(String backupRootDir) {
     return this;
   }
 
-  public String getTargetRootDir() {
-    return targetRootDir;
+  public String getRestoreRootDir() {
+    return restoreRootDir;
   }
 
-  public RestoreRequest setTargetRootDir(String targetRootDir) {
-    this.targetRootDir = targetRootDir;
+  private RestoreRequest setRestoreRootDir(String restoreRootDir) {
+    this.restoreRootDir = restoreRootDir;
     return this;
   }
 
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
index f580fb0c47bb..f7d914d508cd 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
@@ -521,14 +521,14 @@ public void restore(RestoreRequest request) throws IOException {
   @Override
   public String backupTables(BackupRequest request) throws IOException {
     BackupType type = request.getBackupType();
-    String targetRootDir = request.getTargetRootDir();
+    String restoreRootDir = request.getRestoreRootDir();
     List<TableName> tableList = request.getTableList();
 
     String backupId = BackupRestoreConstants.BACKUPID_PREFIX + EnvironmentEdgeManager.currentTime();
     if (type == BackupType.INCREMENTAL) {
       Set<TableName> incrTableSet;
       try (BackupSystemTable table = new BackupSystemTable(conn)) {
-        incrTableSet = table.getIncrementalBackupTableSet(targetRootDir);
+        incrTableSet = table.getIncrementalBackupTableSet(restoreRootDir);
       }
 
       if (incrTableSet.isEmpty()) {
@@ -552,7 +552,7 @@ public String backupTables(BackupRequest request) throws IOException {
     if (tableList != null && !tableList.isEmpty()) {
       for (TableName table : tableList) {
         String targetTableBackupDir =
-          HBackupFileSystem.getTableBackupDir(targetRootDir, backupId, table);
+          HBackupFileSystem.getTableBackupDir(restoreRootDir, backupId, table);
         Path targetTableBackupDirPath = new Path(targetTableBackupDir);
         FileSystem outputFs =
           FileSystem.get(targetTableBackupDirPath.toUri(), conn.getConfiguration());
@@ -588,7 +588,7 @@ public String backupTables(BackupRequest request) throws IOException {
     // update table list
     BackupRequest.Builder builder = new BackupRequest.Builder();
     request = builder.withBackupType(request.getBackupType()).withTableList(tableList)
-      .withTargetRootDir(request.getTargetRootDir()).withBackupSetName(request.getBackupSetName())
+      .withRestoreRootDir(request.getRestoreRootDir()).withBackupSetName(request.getBackupSetName())
       .withTotalTasks(request.getTotalTasks()).withBandwidthPerTasks((int) request.getBandwidth())
       .build();
 
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
index ce9c5bbe8fae..1e334f580ff4 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
@@ -340,7 +340,7 @@ public void execute() throws IOException {
       BackupRequest request = builder.withBackupType(BackupType.valueOf(args[1].toUpperCase()))
         .withTableList(
           tables != null ? Lists.newArrayList(BackupUtils.parseTableNames(tables)) : null)
-        .withTargetRootDir(targetBackupDir).withTotalTasks(workers)
+        .withRestoreRootDir(targetBackupDir).withTotalTasks(workers)
         .withBandwidthPerTasks(bandwidth).withBackupSetName(setName).build();
       String backupId = admin.backupTables(request);
       System.out.println("Backup session " + backupId + " finished. Status: SUCCESS");
Status: SUCCESS"); diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java index ed1755ad5021..914e67a6f9dc 100644 --- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java +++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java @@ -187,17 +187,17 @@ public void close() { /** * Creates a backup info based on input backup request. - * @param backupId backup id - * @param type type - * @param tableList table list - * @param targetRootDir root dir - * @param workers number of parallel workers - * @param bandwidth bandwidth per worker in MB per sec + * @param backupId backup id + * @param type type + * @param tableList table list + * @param restoreRootDir root dir + * @param workers number of parallel workers + * @param bandwidth bandwidth per worker in MB per sec * @throws BackupException exception */ public BackupInfo createBackupInfo(String backupId, BackupType type, List tableList, - String targetRootDir, int workers, long bandwidth) throws BackupException { - if (targetRootDir == null) { + String restoreRootDir, int workers, long bandwidth) throws BackupException { + if (restoreRootDir == null) { throw new BackupException("Wrong backup request parameter: target backup root directory"); } @@ -230,7 +230,7 @@ public BackupInfo createBackupInfo(String backupId, BackupType type, List(backupInfo.getTables()); } @@ -330,7 +330,7 @@ protected String obtainBackupMetaDataStr(BackupInfo backupInfo) { if (sb.lastIndexOf(";") > 0) { sb.delete(sb.lastIndexOf(";"), sb.lastIndexOf(";") + 1); } - sb.append(",targetRootDir=" + backupInfo.getBackupRootDir()); + sb.append(",restoreRootDir=" + backupInfo.getBackupRootDir()); return sb.toString(); } diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.java index 3b4cf0246d73..56911621159f 100644 --- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.java +++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.java @@ -120,7 +120,7 @@ public void run(String[] backupIds) throws IOException { Path[] dirPaths = findInputDirectories(fs, backupRoot, tableNames[i], backupIds); String dirs = StringUtils.join(dirPaths, ","); - Path bulkOutputPath = BackupUtils.getBulkOutputDir( + Path bulkOutputPath = BackupUtils.getBulkOutputDir(fs, BackupUtils.getFileNameCompatibleString(tableNames[i]), getConf(), false); // Delete content if exists if (fs.exists(bulkOutputPath)) { diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java index 41511086d2be..766a99d778b8 100644 --- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java +++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java @@ -56,7 +56,6 @@ public class MapReduceHFileSplitterJob extends Configured implements Tool { private static final Logger LOG = LoggerFactory.getLogger(MapReduceHFileSplitterJob.class); final static String NAME = "HFileSplitterJob"; public final static String BULK_OUTPUT_CONF_KEY = "hfile.bulk.output"; - public static final String 
   public final static String TABLES_KEY = "hfile.input.tables";
   public final static String TABLE_MAP_KEY = "hfile.input.tablesmap";
   private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceRestoreJob.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceRestoreJob.java
index e6046bf5fb92..eee7b6f30c8c 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceRestoreJob.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceRestoreJob.java
@@ -22,6 +22,7 @@
 import java.io.IOException;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
@@ -50,8 +51,8 @@ public MapReduceRestoreJob() {
   }
 
   @Override
-  public void run(Path[] dirPaths, TableName[] tableNames, TableName[] newTableNames,
-    boolean fullBackupRestore) throws IOException {
+  public void run(Path[] dirPaths, TableName[] tableNames, FileSystem restoreFileSystem,
+    TableName[] newTableNames, boolean fullBackupRestore) throws IOException {
     String bulkOutputConfKey;
 
     player = new MapReduceHFileSplitterJob();
@@ -71,8 +72,8 @@ public void run(Path[] dirPaths, TableName[] tableNames, TableName[] newTableNam
     for (int i = 0; i < tableNames.length; i++) {
       LOG.info("Restore " + tableNames[i] + " into " + newTableNames[i]);
 
-      Path bulkOutputPath = BackupUtils
-        .getBulkOutputDir(BackupUtils.getFileNameCompatibleString(newTableNames[i]), getConf());
+      Path bulkOutputPath = BackupUtils.getBulkOutputDir(restoreFileSystem,
+        BackupUtils.getFileNameCompatibleString(newTableNames[i]), getConf());
       Configuration conf = getConf();
       conf.set(bulkOutputConfKey, bulkOutputPath.toString());
       String[] playerArgs = { dirs,
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
index 4f18f1b70875..b71905bcee10 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
@@ -49,7 +49,6 @@
 import org.apache.hadoop.hbase.backup.RestoreRequest;
 import org.apache.hadoop.hbase.backup.impl.BackupManifest;
 import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
-import org.apache.hadoop.hbase.backup.mapreduce.MapReduceHFileSplitterJob;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.RegionInfo;
@@ -686,16 +685,8 @@ public static boolean validate(HashMap<TableName, BackupManifest> backupManifest
     return isValid;
   }
 
-  public static Path getBulkOutputDir(String tableName, Configuration conf, boolean deleteOnExit)
-    throws IOException {
-    FileSystem fs;
-    String bulkOutputRootDir = conf.get(MapReduceHFileSplitterJob.BULK_OUTPUT_ROOT_DIR);
-    if (bulkOutputRootDir != null) {
-      Path rootDir = new Path(bulkOutputRootDir);
-      fs = FileSystem.get(rootDir.toUri(), conf);
-    } else {
-      fs = FileSystem.get(conf);
-    }
+  public static Path getBulkOutputDir(FileSystem fs, String tableName, Configuration conf,
+    boolean deleteOnExit) throws IOException {
     String tmp =
       conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY, fs.getHomeDirectory() + "/hbase-staging");
     Path path =
       new Path(tmp + Path.SEPARATOR + "bulk_output-" + tableName + "-"
@@ -706,8 +697,9 @@ public static Path getBulkOutputDir(String tableName, Configuration conf, boolea
     return path;
   }
 
-  public static Path getBulkOutputDir(String tableName, Configuration conf) throws IOException {
-    return getBulkOutputDir(tableName, conf, true);
+  public static Path getBulkOutputDir(FileSystem restoreFileSystem, String tableName,
+    Configuration conf) throws IOException {
+    return getBulkOutputDir(restoreFileSystem, tableName, conf, true);
   }
 
   public static String getFileNameCompatibleString(TableName table) {
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java
index bf2aa14046db..1381f9853601 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java
@@ -53,7 +53,6 @@
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 
 /**
@@ -69,16 +68,18 @@ public class RestoreTool {
   protected Path backupRootPath;
   protected String backupId;
   protected FileSystem fs;
+  protected FileSystem restoreFs;
 
   // store table name and snapshot dir mapping
   private final HashMap<TableName, Path> snapshotMap = new HashMap<>();
 
-  public RestoreTool(Configuration conf, final Path backupRootPath, final String backupId)
-    throws IOException {
+  public RestoreTool(Configuration conf, final Path backupRootPath,
+    final FileSystem restoreFileSystem, final String backupId) throws IOException {
     this.conf = conf;
     this.backupRootPath = backupRootPath;
     this.backupId = backupId;
     this.fs = backupRootPath.getFileSystem(conf);
+    this.restoreFs = restoreFileSystem;
   }
 
   /**
@@ -200,7 +201,7 @@ public void incrementalRestoreTable(Connection conn, Path tableBackupPath, Path[
       }
 
       RestoreJob restoreService = BackupRestoreFactory.getRestoreJob(conf);
-      restoreService.run(logDirs, tableNames, newTableNames, false);
+      restoreService.run(logDirs, tableNames, restoreFs, newTableNames, false);
     }
   }
 
@@ -350,8 +351,8 @@ private void createAndRestoreTable(Connection conn, TableName tableName, TableNa
       RestoreJob restoreService = BackupRestoreFactory.getRestoreJob(conf);
       Path[] paths = new Path[regionPathList.size()];
       regionPathList.toArray(paths);
-      restoreService.run(paths, new TableName[] { tableName }, new TableName[] { newTableName },
-        true);
+      restoreService.run(paths, new TableName[] { tableName }, restoreFs,
+        new TableName[] { newTableName }, true);
 
     } catch (Exception e) {
       LOG.error(e.toString(), e);
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
index 7b5095a897e2..dc1a3a77928b 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
@@ -362,7 +362,7 @@ protected BackupRequest createBackupRequest(BackupType type, List<TableName> tab
     String path) {
     BackupRequest.Builder builder = new BackupRequest.Builder();
     BackupRequest request =
-      builder.withBackupType(type).withTableList(tables).withTargetRootDir(path).build();
+      builder.withBackupType(type).withTableList(tables).withRestoreRootDir(path).build();
     return request;
   }
 
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java
index 7bf7a55c599c..262bb2485c24 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java
@@ -68,7 +68,7 @@ public Path run() {
       @Override
       public Path run() {
         try {
-          return BackupUtils.getBulkOutputDir("test", conf, false);
+          return BackupUtils.getBulkOutputDir(FileSystem.get(conf), "test", conf, false);
         } catch (IOException ioe) {
           LOG.error("Failed to get bulk output dir path", ioe);
         }
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java
index 1ece1770489b..9a2653ccb8dc 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java
@@ -130,7 +130,7 @@ public void run(String[] backupIds) throws IOException {
           // Find input directories for table
           Path[] dirPaths = findInputDirectories(fs, backupRoot, tableNames[i], backupIds);
           String dirs = StringUtils.join(dirPaths, ",");
 
-          Path bulkOutputPath = BackupUtils.getBulkOutputDir(
+          Path bulkOutputPath = BackupUtils.getBulkOutputDir(fs,
            BackupUtils.getFileNameCompatibleString(tableNames[i]), getConf(), false);
           // Delete content if exists
           if (fs.exists(bulkOutputPath)) {
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteRestore.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteRestore.java
index ce8c6497c9ef..3109456cab68 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteRestore.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteRestore.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.backup.util.BackupUtils;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.junit.BeforeClass;
@@ -65,8 +64,9 @@ public void testFullRestoreRemote() throws Exception {
     LOG.info("backup complete");
     TableName[] tableset = new TableName[] { table1 };
     TableName[] tablemap = new TableName[] { table1_restore };
-    getBackupAdmin().restore(BackupUtils.createRestoreRequest(BACKUP_REMOTE_ROOT_DIR, backupId,
-      false, tableset, tablemap, false));
+    getBackupAdmin().restore(new RestoreRequest.Builder().withBackupRootDir(BACKUP_REMOTE_ROOT_DIR)
+      .withRestoreRootDir(BACKUP_ROOT_DIR).withBackupId(backupId).withCheck(false)
+      .withFromTables(tableset).withToTables(tablemap).withOvewrite(false).build());
     Admin hba = TEST_UTIL.getAdmin();
     assertTrue(hba.tableExists(table1_restore));
     TEST_UTIL.deleteTable(table1_restore);
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
index 1a0446381aed..b480e11e3706 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
@@ -256,7 +256,7 @@ private void runTestSingle(TableName table) throws IOException {
     List<TableName> tables = Lists.newArrayList(table);
     BackupRequest.Builder builder = new BackupRequest.Builder();
     BackupRequest request = builder.withBackupType(BackupType.FULL).withTableList(tables)
-      .withTargetRootDir(BACKUP_ROOT_DIR).build();
+      .withRestoreRootDir(BACKUP_ROOT_DIR).build();
     String backupIdFull = backup(request, client);
     assertTrue(checkSucceeded(backupIdFull));
 
@@ -271,7 +271,7 @@ private void runTestSingle(TableName table) throws IOException {
     // Do incremental backup
     builder = new BackupRequest.Builder();
     request = builder.withBackupType(BackupType.INCREMENTAL).withTableList(tables)
-      .withTargetRootDir(BACKUP_ROOT_DIR).build();
+      .withRestoreRootDir(BACKUP_ROOT_DIR).build();
     String backupId = backup(request, client);
     assertTrue(checkSucceeded(backupId));
     backupIds.add(backupId);
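
A minimal usage sketch of the renamed API follows. Everything in it is illustrative: the class name, cluster URIs, and table names are hypothetical, and the wiring of the restore root to RestoreJob's new FileSystem parameter is inferred from the RestoreTool and BackupUtils hunks above. The restore call mirrors the TestRemoteRestore change, which restores from a remote backup root into the local cluster's root.

// UsageSketch.java -- hypothetical caller, not part of the patch.
import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.BackupAdmin;
import org.apache.hadoop.hbase.backup.BackupRequest;
import org.apache.hadoop.hbase.backup.BackupType;
import org.apache.hadoop.hbase.backup.RestoreRequest;
import org.apache.hadoop.hbase.backup.impl.BackupAdminImpl;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class UsageSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
      BackupAdmin admin = new BackupAdminImpl(conn)) {
      // Full backup of one table; the builder method is now withRestoreRootDir
      // (formerly withTargetRootDir). The URI below is a placeholder.
      BackupRequest backup = new BackupRequest.Builder().withBackupType(BackupType.FULL)
        .withTableList(Collections.singletonList(TableName.valueOf("t1")))
        .withRestoreRootDir("hdfs://backup-cluster:8020/backups").build();
      String backupId = admin.backupTables(backup);

      // Restore from the (possibly remote) backup root into a restore root.
      // The restore root determines the FileSystem handed to RestoreJob.run(),
      // which is where getBulkOutputDir() now stages the bulk-load files.
      RestoreRequest restore = new RestoreRequest.Builder()
        .withBackupRootDir("hdfs://backup-cluster:8020/backups") // placeholder
        .withRestoreRootDir("hdfs://prod-cluster:8020/restore") // placeholder
        .withBackupId(backupId).withCheck(false)
        .withFromTables(new TableName[] { TableName.valueOf("t1") })
        .withToTables(new TableName[] { TableName.valueOf("t1_restore") })
        .withOvewrite(false) // existing spelling of the overwrite option
        .build();
      admin.restore(restore);
    }
  }
}

Passing the FileSystem explicitly means the bulk_output staging directory is created on the restore cluster's filesystem, rather than on whatever fs.defaultFS (or the removed hfile.bulk.output.root.dir key) happened to point at.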