[ML] refactor to use ByteSizeValue#of... helpers (elastic#63768) (elastic#63802)

This is a simple find/replace with regex. 

This replaces all the `new ByteSizeValue(<value>, ByteSizeUnit.<unit>)` calls with the correct `ByteSizeValue.of...` functions.

It's a much better abstraction and removes the need to refer to `ByteSizeUnit` altogether.
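
To make the mapping concrete, here is a minimal before/after sketch. The `ByteSizeValue` calls are taken from the diff below; the `Before`/`After` classes and field names are illustrative only:

```java
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;

class Before {
    // Old style: the unit travels as a separate ByteSizeUnit argument.
    static final ByteSizeValue LIMIT = new ByteSizeValue(500, ByteSizeUnit.MB);
    // Old style: the single-argument constructor implicitly means raw bytes.
    static final ByteSizeValue RAW = new ByteSizeValue(1024);
}

class After {
    // New style: the unit is baked into the factory-method name,
    // so the ByteSizeUnit import is no longer needed.
    static final ByteSizeValue LIMIT = ByteSizeValue.ofMb(500);
    static final ByteSizeValue RAW = ByteSizeValue.ofBytes(1024);
}
```

The commit doesn't include the actual regex, but a substitution along the lines of `s/new ByteSizeValue\((.+), ByteSizeUnit\.([KMGTP])B\)/ByteSizeValue.of\2b(\1)/` would cover the unit-suffixed constructor, with a second pass such as `s/new ByteSizeValue\(([^,)]+)\)/ByteSizeValue.ofBytes(\1)/` for the raw-bytes one.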
benwtrent committed Oct 16, 2020
1 parent 4515ee8 commit 1f7397f
Showing 40 changed files with 126 additions and 157 deletions.
@@ -14,7 +14,6 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
@@ -44,11 +43,11 @@ public static class Request extends JobTaskRequest<Request> implements ToXConten
public static final ParseField EXPIRES_IN = new ParseField("expires_in");
public static final ParseField MAX_MODEL_MEMORY = new ParseField("max_model_memory");

-public static final ByteSizeValue FORECAST_LOCAL_STORAGE_LIMIT = new ByteSizeValue(500, ByteSizeUnit.MB);
+public static final ByteSizeValue FORECAST_LOCAL_STORAGE_LIMIT = ByteSizeValue.ofMb(500);

// Max allowed duration: 10 years
private static final TimeValue MAX_DURATION = TimeValue.parseTimeValue("3650d", "");
-private static final long MIN_MODEL_MEMORY = new ByteSizeValue(1, ByteSizeUnit.MB).getBytes();
+private static final long MIN_MODEL_MEMORY = ByteSizeValue.ofMb(1).getBytes();

private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

@@ -188,7 +187,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(EXPIRES_IN.getPreferredName(), expiresIn.getStringRep());
}
if (maxModelMemory != null) {
-builder.field(MAX_MODEL_MEMORY.getPreferredName(), new ByteSizeValue(maxModelMemory).getStringRep());
+builder.field(MAX_MODEL_MEMORY.getPreferredName(), ByteSizeValue.ofBytes(maxModelMemory).getStringRep());
}
builder.endObject();
return builder;
@@ -12,7 +12,6 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@@ -40,13 +39,13 @@ public class DataFrameAnalyticsConfig implements ToXContentObject, Writeable {

public static final String TYPE = "data_frame_analytics_config";

-public static final ByteSizeValue DEFAULT_MODEL_MEMORY_LIMIT = new ByteSizeValue(1, ByteSizeUnit.GB);
-public static final ByteSizeValue MIN_MODEL_MEMORY_LIMIT = new ByteSizeValue(1, ByteSizeUnit.KB);
+public static final ByteSizeValue DEFAULT_MODEL_MEMORY_LIMIT = ByteSizeValue.ofGb(1);
+public static final ByteSizeValue MIN_MODEL_MEMORY_LIMIT = ByteSizeValue.ofKb(1);
/**
* This includes the overhead of thread stacks and data structures that the program might use that
* are not instrumented. But it does NOT include the memory used by loading the executable code.
*/
-public static final ByteSizeValue PROCESS_MEMORY_OVERHEAD = new ByteSizeValue(5, ByteSizeUnit.MB);
+public static final ByteSizeValue PROCESS_MEMORY_OVERHEAD = ByteSizeValue.ofMb(5);

public static final ParseField ID = new ParseField("id");
public static final ParseField DESCRIPTION = new ParseField("description");
@@ -11,7 +11,6 @@
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -41,7 +40,7 @@ public final class InferenceToXContentCompressor {
// Either 10% of the configured JVM heap, or 1 GB, which ever is smaller
private static final long MAX_INFLATED_BYTES = Math.min(
(long)((0.10) * JvmInfo.jvmInfo().getMem().getHeapMax().getBytes()),
-new ByteSizeValue(1, ByteSizeUnit.GB).getBytes());
+ByteSizeValue.ofGb(1).getBytes());

private InferenceToXContentCompressor() {}

@@ -324,7 +324,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.humanReadableField(
ESTIMATED_HEAP_MEMORY_USAGE_BYTES.getPreferredName(),
ESTIMATED_HEAP_MEMORY_USAGE_HUMAN,
-new ByteSizeValue(estimatedHeapMemory));
+ByteSizeValue.ofBytes(estimatedHeapMemory));
builder.field(ESTIMATED_OPERATIONS.getPreferredName(), estimatedOperations);
builder.field(LICENSE_LEVEL.getPreferredName(), licenseLevel.description());
}
@@ -10,7 +10,6 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
@@ -142,7 +141,7 @@ public static AnalysisLimits validateAndSetDefaults(@Nullable AnalysisLimits sou

if (maxModelMemoryIsSet && modelMemoryLimit > maxModelMemoryLimit.getMb()) {
throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_MODEL_MEMORY_LIMIT_GREATER_THAN_MAX,
-new ByteSizeValue(modelMemoryLimit, ByteSizeUnit.MB),
+ByteSizeValue.ofMb(modelMemoryLimit),
maxModelMemoryLimit));
}

@@ -14,7 +14,6 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
@@ -96,7 +95,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
* and the <code>normalize</code> process is not instrumented at all.) But this overhead does NOT
* include the memory used by loading the executable code.
*/
-public static final ByteSizeValue PROCESS_MEMORY_OVERHEAD = new ByteSizeValue(10, ByteSizeUnit.MB);
+public static final ByteSizeValue PROCESS_MEMORY_OVERHEAD = ByteSizeValue.ofMb(10);

public static final long DEFAULT_MODEL_SNAPSHOT_RETENTION_DAYS = 10;
public static final long DEFAULT_DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS = 1;
@@ -19,7 +19,6 @@
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -787,7 +786,7 @@ public void testTooLowConfiguredMemoryStillStarts() throws Exception {

DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder(
buildAnalytics(jobId, sourceIndex, destIndex, null, new Classification(NESTED_FIELD)))
-.setModelMemoryLimit(new ByteSizeValue(1, ByteSizeUnit.KB))
+.setModelMemoryLimit(ByteSizeValue.ofKb(1))
.build();
putAnalytics(config);
// Shouldn't throw
@@ -10,7 +10,6 @@
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.cluster.metadata.Metadata;
-import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.core.ml.action.DeleteForecastAction;
@@ -534,7 +533,7 @@ public void testForecastWithHigherMemoryUse() throws Exception {
String forecastId = forecast(job.getId(),
TimeValue.timeValueHours(1),
TimeValue.ZERO,
-new ByteSizeValue(50, ByteSizeUnit.MB).getBytes());
+ByteSizeValue.ofMb(50).getBytes());

waitForecastToFinish(job.getId(), forecastId);
closeJob(job.getId());
@@ -14,7 +14,6 @@
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
@@ -506,7 +505,7 @@ public void testModelMemoryLimitLowerThanEstimatedMemoryUsage() throws Exception
}

String id = "test_model_memory_limit_lower_than_estimated_memory_usage";
-ByteSizeValue modelMemoryLimit = new ByteSizeValue(1, ByteSizeUnit.MB);
+ByteSizeValue modelMemoryLimit = ByteSizeValue.ofMb(1);
DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder()
.setId(id)
.setSource(new DataFrameAnalyticsSource(new String[] { sourceIndex }, null, null))
@@ -544,7 +543,7 @@ public void testLazyAssignmentWithModelMemoryLimitTooHighForAssignment() throws

String id = "test_lazy_assign_model_memory_limit_too_high";
// Assuming a 1TB job will never fit on the test machine - increase this when machines get really big!
-ByteSizeValue modelMemoryLimit = new ByteSizeValue(1, ByteSizeUnit.TB);
+ByteSizeValue modelMemoryLimit = ByteSizeValue.ofTb(1);
DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder()
.setId(id)
.setSource(new DataFrameAnalyticsSource(new String[] { sourceIndex }, null, null))
@@ -13,7 +13,6 @@
import org.elasticsearch.common.CheckedRunnable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
@@ -67,7 +66,7 @@ public void testFailOverBasics() throws Exception {
internalCluster().ensureAtLeastNumDataNodes(4);
ensureStableCluster(4);

-Job.Builder job = createJob("fail-over-basics-job", new ByteSizeValue(2, ByteSizeUnit.MB));
+Job.Builder job = createJob("fail-over-basics-job", ByteSizeValue.ofMb(2));
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();
ensureYellow(); // at least the primary shards of the indices a job uses should be started
@@ -210,7 +209,7 @@ public void testDedicatedMlNode() throws Exception {
ensureStableCluster(3);

String jobId = "dedicated-ml-node-job";
-Job.Builder job = createJob(jobId, new ByteSizeValue(2, ByteSizeUnit.MB));
+Job.Builder job = createJob(jobId, ByteSizeValue.ofMb(2));
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();

@@ -288,7 +287,7 @@ public void testMaxConcurrentJobAllocations() throws Exception {
ensureYellow(); // at least the primary shards of the indices a job uses should be started
int numJobs = numMlNodes * 10;
for (int i = 0; i < numJobs; i++) {
-Job.Builder job = createJob(Integer.toString(i), new ByteSizeValue(2, ByteSizeUnit.MB));
+Job.Builder job = createJob(Integer.toString(i), ByteSizeValue.ofMb(2));
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();

@@ -429,7 +428,7 @@ public void testCloseUnassignedLazyJobAndDatafeed() throws Exception {
String jobId = "test-lazy-stop";
String datafeedId = jobId + "-datafeed";
// Assume the test machine won't have space to assign a 2TB job
-Job.Builder job = createJob(jobId, new ByteSizeValue(2, ByteSizeUnit.TB), true);
+Job.Builder job = createJob(jobId, ByteSizeValue.ofTb(2), true);
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();

@@ -14,7 +14,6 @@
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
@@ -205,7 +204,7 @@ public void testUpdate() throws Exception {

DataFrameAnalyticsConfigUpdate configUpdate =
new DataFrameAnalyticsConfigUpdate.Builder(configId)
-.setModelMemoryLimit(new ByteSizeValue(1024))
+.setModelMemoryLimit(ByteSizeValue.ofBytes(1024))
.build();

blockingCall(
@@ -220,7 +219,7 @@ public void testUpdate() throws Exception {
is(equalTo(
new DataFrameAnalyticsConfig.Builder(initialConfig)
.setDescription("description-1")
-.setModelMemoryLimit(new ByteSizeValue(1024))
+.setModelMemoryLimit(ByteSizeValue.ofBytes(1024))
.build())));
}
{ // Noop update
@@ -241,7 +240,7 @@ public void testUpdate() throws Exception {
is(equalTo(
new DataFrameAnalyticsConfig.Builder(initialConfig)
.setDescription("description-1")
-.setModelMemoryLimit(new ByteSizeValue(1024))
+.setModelMemoryLimit(ByteSizeValue.ofBytes(1024))
.build())));
}
{ // Update that changes both description and model memory limit
@@ -251,7 +250,7 @@ public void testUpdate() throws Exception {
DataFrameAnalyticsConfigUpdate configUpdate =
new DataFrameAnalyticsConfigUpdate.Builder(configId)
.setDescription("description-2")
-.setModelMemoryLimit(new ByteSizeValue(2048))
+.setModelMemoryLimit(ByteSizeValue.ofBytes(2048))
.build();

blockingCall(
@@ -266,7 +265,7 @@ public void testUpdate() throws Exception {
is(equalTo(
new DataFrameAnalyticsConfig.Builder(initialConfig)
.setDescription("description-2")
-.setModelMemoryLimit(new ByteSizeValue(2048))
+.setModelMemoryLimit(ByteSizeValue.ofBytes(2048))
.build())));
}
{ // Update that applies security headers
@@ -289,7 +288,7 @@ public void testUpdate() throws Exception {
is(equalTo(
new DataFrameAnalyticsConfig.Builder(initialConfig)
.setDescription("description-2")
-.setModelMemoryLimit(new ByteSizeValue(2048))
+.setModelMemoryLimit(ByteSizeValue.ofBytes(2048))
.setHeaders(securityHeaders)
.build())));
}
@@ -332,7 +331,7 @@ public void testUpdate_UpdateCannotBeAppliedWhenTaskIsRunning() throws Interrupt

DataFrameAnalyticsConfigUpdate configUpdate =
new DataFrameAnalyticsConfigUpdate.Builder(configId)
-.setModelMemoryLimit(new ByteSizeValue(2048, ByteSizeUnit.MB))
+.setModelMemoryLimit(ByteSizeValue.ofMb(2048))
.build();

ClusterState clusterState = clusterStateWithRunningAnalyticsTask(configId, DataFrameAnalyticsState.ANALYZING);
@@ -153,7 +153,7 @@ public void testCrud() throws InterruptedException {

AtomicReference<Job> updateJobResponseHolder = new AtomicReference<>();
blockingCall(actionListener -> jobConfigProvider.updateJob
-(jobId, jobUpdate, new ByteSizeValue(32), actionListener), updateJobResponseHolder, exceptionHolder);
+(jobId, jobUpdate, ByteSizeValue.ofBytes(32), actionListener), updateJobResponseHolder, exceptionHolder);
assertNull(exceptionHolder.get());
assertEquals("This job has been updated", updateJobResponseHolder.get().getDescription());

@@ -210,7 +209,7 @@ public void testUpdateWithAValidationError() throws Exception {
.build();

AtomicReference<Job> updateJobResponseHolder = new AtomicReference<>();
-blockingCall(actionListener -> jobConfigProvider.updateJob(jobId, invalidUpdate, new ByteSizeValue(32),
+blockingCall(actionListener -> jobConfigProvider.updateJob(jobId, invalidUpdate, ByteSizeValue.ofBytes(32),
actionListener), updateJobResponseHolder, exceptionHolder);
assertNull(updateJobResponseHolder.get());
assertNotNull(exceptionHolder.get());
@@ -235,7 +235,7 @@ public void testUpdateWithValidator() throws Exception {
AtomicReference<Job> updateJobResponseHolder = new AtomicReference<>();
// update with the no-op validator
blockingCall(actionListener -> jobConfigProvider.updateJobWithValidation(
-jobId, jobUpdate, new ByteSizeValue(32), validator, actionListener), updateJobResponseHolder, exceptionHolder);
+jobId, jobUpdate, ByteSizeValue.ofBytes(32), validator, actionListener), updateJobResponseHolder, exceptionHolder);

assertNull(exceptionHolder.get());
assertNotNull(updateJobResponseHolder.get());
@@ -247,7 +247,7 @@

updateJobResponseHolder.set(null);
// Update with a validator that errors
-blockingCall(actionListener -> jobConfigProvider.updateJobWithValidation(jobId, jobUpdate, new ByteSizeValue(32),
+blockingCall(actionListener -> jobConfigProvider.updateJobWithValidation(jobId, jobUpdate, ByteSizeValue.ofBytes(32),
validatorWithAnError, actionListener),
updateJobResponseHolder, exceptionHolder);

[Diff truncated; the remaining changed files are not shown.]