Skip to content

Commit

Permalink
HBASE-27541 Add support for defining restore hfile output path
Browse files Browse the repository at this point in the history
  • Loading branch information
Jarryd Lee committed Dec 21, 2022
1 parent 222ec68 commit 5a29f8a
Show file tree
Hide file tree
Showing 4 changed files with 32 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,11 @@ public Builder withBackupRootDir(String backupRootDir) {
return this;
}

/**
 * Sets the target root directory on the request being built.
 * NOTE(review): per this commit's intent (HBASE-27541), this value appears to configure
 * where restored HFile output is written, separate from the backup root dir — confirm
 * against RestoreTablesClient, which copies it into the bulk-output-root-dir config key.
 * @param targetRootDir root directory for restore hfile output
 * @return this builder, for call chaining
 */
public Builder withTargetRootDir(String targetRootDir) {
request.setTargetRootDir(targetRootDir);
return this;
}

public Builder withBackupId(String backupId) {
request.setBackupId(backupId);
return this;
Expand Down Expand Up @@ -68,6 +73,7 @@ public RestoreRequest build() {
}

private String backupRootDir;
private String targetRootDir;
private String backupId;
private boolean check = false;
private TableName[] fromTables;
Expand All @@ -86,6 +92,15 @@ private RestoreRequest setBackupRootDir(String backupRootDir) {
return this;
}

/**
 * Returns the target root directory for restore hfile output, or {@code null} if unset.
 * @return the target root directory, possibly {@code null}
 */
public String getTargetRootDir() {
return targetRootDir;
}

/**
 * Sets the target root directory for restore hfile output.
 * @param targetRootDir root directory for restore hfile output
 * @return this request, for call chaining
 */
public RestoreRequest setTargetRootDir(String targetRootDir) {
this.targetRootDir = targetRootDir;
return this;
}

/**
 * Returns the id of the backup image to restore from.
 * @return the backup id
 */
public String getBackupId() {
return backupId;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import org.apache.hadoop.hbase.backup.HBackupFileSystem;
import org.apache.hadoop.hbase.backup.RestoreRequest;
import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
import org.apache.hadoop.hbase.backup.mapreduce.MapReduceHFileSplitterJob;
import org.apache.hadoop.hbase.backup.util.RestoreTool;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
Expand All @@ -55,11 +56,11 @@ public class RestoreTablesClient {
private String backupId;
private TableName[] sTableArray;
private TableName[] tTableArray;
private String targetRootDir;
private String backupRootDir;
private boolean isOverwrite;

public RestoreTablesClient(Connection conn, RestoreRequest request) {
this.targetRootDir = request.getBackupRootDir();
this.backupRootDir = request.getBackupRootDir();
this.backupId = request.getBackupId();
this.sTableArray = request.getFromTables();
this.tTableArray = request.getToTables();
Expand All @@ -69,6 +70,9 @@ public RestoreTablesClient(Connection conn, RestoreRequest request) {
this.isOverwrite = request.isOverwrite();
this.conn = conn;
this.conf = conn.getConfiguration();
if (request.getTargetRootDir() != null) {
conf.set(MapReduceHFileSplitterJob.BULK_OUTPUT_ROOT_DIR, request.getTargetRootDir());
}
}

/**
Expand Down Expand Up @@ -249,7 +253,7 @@ public void execute() throws IOException {
// case RESTORE_IMAGES:
HashMap<TableName, BackupManifest> backupManifestMap = new HashMap<>();
// check and load backup image manifest for the tables
Path rootPath = new Path(targetRootDir);
Path rootPath = new Path(backupRootDir);
HBackupFileSystem.checkImageManifestExist(backupManifestMap, sTableArray, conf, rootPath,
backupId);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ public class MapReduceHFileSplitterJob extends Configured implements Tool {
private static final Logger LOG = LoggerFactory.getLogger(MapReduceHFileSplitterJob.class);
final static String NAME = "HFileSplitterJob";
public final static String BULK_OUTPUT_CONF_KEY = "hfile.bulk.output";
public static final String BULK_OUTPUT_ROOT_DIR = "hfile.bulk.output.root.dir";
public final static String TABLES_KEY = "hfile.input.tables";
public final static String TABLE_MAP_KEY = "hfile.input.tablesmap";
private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@
import org.apache.hadoop.hbase.backup.RestoreRequest;
import org.apache.hadoop.hbase.backup.impl.BackupManifest;
import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
import org.apache.hadoop.hbase.backup.mapreduce.MapReduceHFileSplitterJob;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionInfo;
Expand Down Expand Up @@ -687,7 +688,14 @@ public static boolean validate(HashMap<TableName, BackupManifest> backupManifest

public static Path getBulkOutputDir(String tableName, Configuration conf, boolean deleteOnExit)
throws IOException {
FileSystem fs = FileSystem.get(conf);
FileSystem fs;
String bulkOutputRootDir = conf.get(MapReduceHFileSplitterJob.BULK_OUTPUT_ROOT_DIR);
if (bulkOutputRootDir != null) {
Path rootDir = new Path(bulkOutputRootDir);
fs = FileSystem.get(rootDir.toUri(), conf);
} else {
fs = FileSystem.get(conf);
}
String tmp =
conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY, fs.getHomeDirectory() + "/hbase-staging");
Path path = new Path(tmp + Path.SEPARATOR + "bulk_output-" + tableName + "-"
Expand Down

0 comments on commit 5a29f8a

Please sign in to comment.