Skip to content
Permalink
Browse files
[MINOR] Deprecate older configs (#3464)
Rename and deprecate props in HoodieWriteConfig

Rename and deprecate older props
  • Loading branch information
codope committed Aug 13, 2021
1 parent 76bc686 commit 0544d70d8f4204f4e5edfe9144c17f1ed221eb7c
Showing 49 changed files with 688 additions and 400 deletions.
@@ -204,14 +204,14 @@ public String showLogFileRecords(
.getCommitTimeline().lastInstant().get().getTimestamp())
.withReadBlocksLazily(
Boolean.parseBoolean(
HoodieCompactionConfig.COMPACTION_LAZY_BLOCK_READ_ENABLED_PROP.defaultValue()))
HoodieCompactionConfig.COMPACTION_LAZY_BLOCK_READ_ENABLED.defaultValue()))
.withReverseReader(
Boolean.parseBoolean(
HoodieCompactionConfig.COMPACTION_REVERSE_LOG_READ_ENABLED_PROP.defaultValue()))
.withBufferSize(HoodieMemoryConfig.MAX_DFS_STREAM_BUFFER_SIZE_PROP.defaultValue())
HoodieCompactionConfig.COMPACTION_REVERSE_LOG_READ_ENABLED.defaultValue()))
.withBufferSize(HoodieMemoryConfig.MAX_DFS_STREAM_BUFFER_SIZE.defaultValue())
.withMaxMemorySizeInBytes(
HoodieMemoryConfig.DEFAULT_MAX_MEMORY_FOR_SPILLABLE_MAP_IN_BYTES)
.withSpillableMapBasePath(HoodieMemoryConfig.SPILLABLE_MAP_BASE_PATH_PROP.defaultValue())
.withSpillableMapBasePath(HoodieMemoryConfig.SPILLABLE_MAP_BASE_PATH.defaultValue())
.withDiskMapType(HoodieCommonConfig.SPILLABLE_DISK_MAP_TYPE.defaultValue())
.withBitCaskDiskMapCompressionEnabled(HoodieCommonConfig.DISK_MAP_BITCASK_COMPRESSION_ENABLED.defaultValue())
.build();
@@ -361,7 +361,7 @@ private static int doBootstrap(JavaSparkContext jsc, String tableName, String ta
TypedProperties properties = propsFilePath == null ? UtilHelpers.buildProperties(configs)
: UtilHelpers.readConfig(FSUtils.getFs(propsFilePath, jsc.hadoopConfiguration()), new Path(propsFilePath), configs).getConfig();

properties.setProperty(HoodieBootstrapConfig.BOOTSTRAP_BASE_PATH_PROP.key(), sourcePath);
properties.setProperty(HoodieBootstrapConfig.BOOTSTRAP_BASE_PATH.key(), sourcePath);

if (!StringUtils.isNullOrEmpty(keyGenerator) && KeyGeneratorType.getNames().contains(keyGenerator.toUpperCase(Locale.ROOT))) {
properties.setProperty(HoodieBootstrapConfig.BOOTSTRAP_KEYGEN_TYPE.key(), keyGenerator.toUpperCase(Locale.ROOT));
@@ -208,12 +208,12 @@ public void testShowLogFileRecordsWithMerge() throws IOException, InterruptedExc
HoodieMemoryConfig.DEFAULT_MAX_MEMORY_FOR_SPILLABLE_MAP_IN_BYTES)
.withReadBlocksLazily(
Boolean.parseBoolean(
HoodieCompactionConfig.COMPACTION_LAZY_BLOCK_READ_ENABLED_PROP.defaultValue()))
HoodieCompactionConfig.COMPACTION_LAZY_BLOCK_READ_ENABLED.defaultValue()))
.withReverseReader(
Boolean.parseBoolean(
HoodieCompactionConfig.COMPACTION_REVERSE_LOG_READ_ENABLED_PROP.defaultValue()))
.withBufferSize(HoodieMemoryConfig.MAX_DFS_STREAM_BUFFER_SIZE_PROP.defaultValue())
.withSpillableMapBasePath(HoodieMemoryConfig.SPILLABLE_MAP_BASE_PATH_PROP.defaultValue())
HoodieCompactionConfig.COMPACTION_REVERSE_LOG_READ_ENABLED.defaultValue()))
.withBufferSize(HoodieMemoryConfig.MAX_DFS_STREAM_BUFFER_SIZE.defaultValue())
.withSpillableMapBasePath(HoodieMemoryConfig.SPILLABLE_MAP_BASE_PATH.defaultValue())
.withDiskMapType(HoodieCommonConfig.SPILLABLE_DISK_MAP_TYPE.defaultValue())
.withBitCaskDiskMapCompressionEnabled(HoodieCommonConfig.DISK_MAP_BITCASK_COMPRESSION_ENABLED.defaultValue())
.build();
@@ -83,7 +83,7 @@ private String getApiKey() {
}

private String getUrl() {
return writeConfig.getString(HoodieWriteCommitCallbackConfig.CALLBACK_HTTP_URL_PROP);
return writeConfig.getString(HoodieWriteCommitCallbackConfig.CALLBACK_HTTP_URL);
}

private CloseableHttpClient getClient() {
@@ -39,7 +39,7 @@ public static HoodieWriteCommitCallback create(HoodieWriteConfig config) {
return (HoodieWriteCommitCallback) instance;
} else {
throw new HoodieCommitCallbackException(String.format("The value of the config option %s can not be null or "
+ "empty", HoodieWriteCommitCallbackConfig.CALLBACK_CLASS_PROP.key()));
+ "empty", HoodieWriteCommitCallbackConfig.CALLBACK_CLASS.key()));
}
}

@@ -453,19 +453,19 @@ protected void runTableServicesInline(HoodieTable<T, I, K, O> table, HoodieCommi
// Do an inline compaction if enabled
if (config.inlineCompactionEnabled()) {
runAnyPendingCompactions(table);
metadata.addMetadata(HoodieCompactionConfig.INLINE_COMPACT_PROP.key(), "true");
metadata.addMetadata(HoodieCompactionConfig.INLINE_COMPACT.key(), "true");
inlineCompact(extraMetadata);
} else {
metadata.addMetadata(HoodieCompactionConfig.INLINE_COMPACT_PROP.key(), "false");
metadata.addMetadata(HoodieCompactionConfig.INLINE_COMPACT.key(), "false");
}

// Do an inline clustering if enabled
if (config.inlineClusteringEnabled()) {
runAnyPendingClustering(table);
metadata.addMetadata(HoodieClusteringConfig.INLINE_CLUSTERING_PROP.key(), "true");
metadata.addMetadata(HoodieClusteringConfig.INLINE_CLUSTERING.key(), "true");
inlineCluster(extraMetadata);
} else {
metadata.addMetadata(HoodieClusteringConfig.INLINE_CLUSTERING_PROP.key(), "false");
metadata.addMetadata(HoodieClusteringConfig.INLINE_CLUSTERING.key(), "false");
}
}
}
@@ -44,11 +44,13 @@
+ " writers and new hudi writers in parallel, to validate the migration.")
public class HoodieBootstrapConfig extends HoodieConfig {

public static final ConfigProperty<String> BOOTSTRAP_BASE_PATH_PROP = ConfigProperty
public static final ConfigProperty<String> BOOTSTRAP_BASE_PATH = ConfigProperty
.key("hoodie.bootstrap.base.path")
.noDefaultValue()
.sinceVersion("0.6.0")
.withDocumentation("Base path of the dataset that needs to be bootstrapped as a Hudi table");
@Deprecated
public static final String BOOTSTRAP_BASE_PATH_PROP = BOOTSTRAP_BASE_PATH.key();

public static final ConfigProperty<String> BOOTSTRAP_MODE_SELECTOR = ConfigProperty
.key("hoodie.bootstrap.mode.selector")
@@ -100,11 +102,13 @@ public class HoodieBootstrapConfig extends HoodieConfig {
+ "METADATA_ONLY will generate just skeleton base files with keys/footers, avoiding full cost of rewriting the dataset. "
+ "FULL_RECORD will perform a full copy/rewrite of the data as a Hudi table.");

public static final ConfigProperty<String> BOOTSTRAP_INDEX_CLASS_PROP = ConfigProperty
public static final ConfigProperty<String> BOOTSTRAP_INDEX_CLASS = ConfigProperty
.key("hoodie.bootstrap.index.class")
.defaultValue(HFileBootstrapIndex.class.getName())
.sinceVersion("0.6.0")
.withDocumentation("Implementation to use, for mapping a skeleton base file to a bootstrap base file.");
@Deprecated
public static final String BOOTSTRAP_INDEX_CLASS_PROP = BOOTSTRAP_INDEX_CLASS.key();

private HoodieBootstrapConfig() {
super();
@@ -126,7 +130,7 @@ public Builder fromFile(File propertiesFile) throws IOException {
}

public Builder withBootstrapBasePath(String basePath) {
bootstrapConfig.setValue(BOOTSTRAP_BASE_PATH_PROP, basePath);
bootstrapConfig.setValue(BOOTSTRAP_BASE_PATH, basePath);
return this;
}

@@ -178,7 +182,7 @@ public Builder fromProperties(Properties props) {

public HoodieBootstrapConfig build() {
// TODO: use infer function instead
bootstrapConfig.setDefaultValue(BOOTSTRAP_INDEX_CLASS_PROP, HoodieTableConfig.getDefaultBootstrapIndexClass(
bootstrapConfig.setDefaultValue(BOOTSTRAP_INDEX_CLASS, HoodieTableConfig.getDefaultBootstrapIndexClass(
bootstrapConfig.getProps()));
bootstrapConfig.setDefaults(HoodieBootstrapConfig.class.getName());
return bootstrapConfig;
@@ -62,17 +62,21 @@ public class HoodieClusteringConfig extends HoodieConfig {
+ " clustering plan is executed. By default, we sort the file groups in the plan by the specified columns, while "
+ " meeting the configured target file sizes.");

public static final ConfigProperty<String> INLINE_CLUSTERING_PROP = ConfigProperty
public static final ConfigProperty<String> INLINE_CLUSTERING = ConfigProperty
.key("hoodie.clustering.inline")
.defaultValue("false")
.sinceVersion("0.7.0")
.withDocumentation("Turn on inline clustering - clustering will be run after each write operation is complete");
@Deprecated
public static final String INLINE_CLUSTERING_PROP = INLINE_CLUSTERING.key();

public static final ConfigProperty<String> INLINE_CLUSTERING_MAX_COMMIT_PROP = ConfigProperty
public static final ConfigProperty<String> INLINE_CLUSTERING_MAX_COMMIT = ConfigProperty
.key("hoodie.clustering.inline.max.commits")
.defaultValue("4")
.sinceVersion("0.7.0")
.withDocumentation("Config to control frequency of clustering planning");
@Deprecated
public static final String INLINE_CLUSTERING_MAX_COMMIT_PROP = INLINE_CLUSTERING_MAX_COMMIT.key();

public static final ConfigProperty<String> ASYNC_CLUSTERING_MAX_COMMIT_PROP = ConfigProperty
.key("hoodie.clustering.async.max.commits")
@@ -118,18 +122,22 @@ public class HoodieClusteringConfig extends HoodieConfig {
.sinceVersion("0.7.0")
.withDocumentation("Columns to sort the data by when clustering");

public static final ConfigProperty<String> CLUSTERING_UPDATES_STRATEGY_PROP = ConfigProperty
public static final ConfigProperty<String> CLUSTERING_UPDATES_STRATEGY = ConfigProperty
.key("hoodie.clustering.updates.strategy")
.defaultValue("org.apache.hudi.client.clustering.update.strategy.SparkRejectUpdateStrategy")
.sinceVersion("0.7.0")
.withDocumentation("Determines how to handle updates, deletes to file groups that are under clustering."
+ " Default strategy just rejects the update");
@Deprecated
public static final String CLUSTERING_UPDATES_STRATEGY_PROP = CLUSTERING_UPDATES_STRATEGY.key();

public static final ConfigProperty<String> ASYNC_CLUSTERING_ENABLE = ConfigProperty
.key("hoodie.clustering.async.enabled")
.defaultValue("false")
.sinceVersion("0.7.0")
.withDocumentation("Enable running of clustering service, asynchronously as inserts happen on the table.");
@Deprecated
public static final String ASYNC_CLUSTERING_ENABLE_OPT_KEY = "hoodie.clustering.async.enabled";

public static final ConfigProperty<Boolean> CLUSTERING_PRESERVE_HOODIE_COMMIT_METADATA = ConfigProperty
.key("hoodie.clustering.preserve.commit.metadata")
@@ -202,12 +210,12 @@ public Builder withClusteringTargetFileMaxBytes(long targetFileSize) {
}

public Builder withInlineClustering(Boolean inlineClustering) {
clusteringConfig.setValue(INLINE_CLUSTERING_PROP, String.valueOf(inlineClustering));
clusteringConfig.setValue(INLINE_CLUSTERING, String.valueOf(inlineClustering));
return this;
}

public Builder withInlineClusteringNumCommits(int numCommits) {
clusteringConfig.setValue(INLINE_CLUSTERING_MAX_COMMIT_PROP, String.valueOf(numCommits));
clusteringConfig.setValue(INLINE_CLUSTERING_MAX_COMMIT, String.valueOf(numCommits));
return this;
}

@@ -222,7 +230,7 @@ public Builder fromProperties(Properties props) {
}

public Builder withClusteringUpdatesStrategy(String updatesStrategyClass) {
clusteringConfig.setValue(CLUSTERING_UPDATES_STRATEGY_PROP, updatesStrategyClass);
clusteringConfig.setValue(CLUSTERING_UPDATES_STRATEGY, updatesStrategyClass);
return this;
}

0 comments on commit 0544d70

Please sign in to comment.