Allow supplying custom workflow yaml for infer_with_model job (#7902)
* WIP: Allow supplying full workflow yaml for infer_with_model job

* fix request logging rounding error

* show workflow yaml editor (collapsed by default) when configuring inference for custom models

* remove new_segmentation_layer_name param

* Add default workflow to custom model inference

* only use custom workflow if the advanced collapse is enabled & make enabling more explicit via a checkbox

* remove option to specify output segmentation layer name

* reset application.conf

* rename job param

* rename workflow template file and rename its imported var to full caps

---------

Co-authored-by: Philipp Otto <philipp.4096@gmail.com>
Co-authored-by: Michael Büßemeyer <michael.buessemeyer@student.hpi.de>
3 people committed Aug 5, 2024
1 parent ab290ec commit 24e1e00
Showing 9 changed files with 102 additions and 174 deletions.
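
In effect, the request body for custom-model inference gains an optional workflowYaml field. A minimal sketch of such a payload, following the RunInferenceParameters shape in this diff (all concrete values, including the aiModelId, are illustrative placeholders):

// Sketch of a custom-model inference payload after this change. Field
// names follow the frontend RunInferenceParameters type in this diff;
// all values are illustrative, and annotationId / maskAnnotationLayerName
// remain optional.
const inferenceParams = {
  aiModelId: "646f1e2b0100000001000001", // hypothetical ObjectId
  datasetName: "my_dataset",
  colorLayerName: "color",
  boundingBox: [0, 0, 0, 512, 512, 512], // Vector6: x, y, z, width, height, depth
  newDatasetName: "my_dataset_inference",
  // New in this commit: custom workflow YAML, sent only when the user
  // explicitly enables the (collapsed-by-default) YAML editor.
  workflowYaml: undefined as string | undefined,
};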
13 changes: 6 additions & 7 deletions app/controllers/AiModelController.scala
@@ -43,9 +43,9 @@ case class RunInferenceParameters(annotationId: Option[ObjectId],
                                   datasetName: String,
                                   colorLayerName: String,
                                   boundingBox: String,
-                                  newSegmentationLayerName: String,
                                   newDatasetName: String,
-                                  maskAnnotationLayerName: Option[String])
+                                  maskAnnotationLayerName: Option[String],
+                                  workflowYaml: Option[String])
 
 object RunInferenceParameters {
   implicit val jsonFormat: OFormat[RunInferenceParameters] = Json.format[RunInferenceParameters]
@@ -135,7 +135,7 @@ class AiModelController @Inject()(
           "training_annotations" -> Json.toJson(trainingAnnotations),
           "organization_name" -> organization.name,
           "model_id" -> modelId,
-          "workflow_yaml" -> request.body.workflowYaml
+          "custom_workflow_provided_by_user" -> request.body.workflowYaml
         )
         existingAiModelsCount <- aiModelDAO.countByNameAndOrganization(request.body.name,
                                                                        request.identity._organization)
Expand Down Expand Up @@ -168,7 +168,6 @@ class AiModelController @Inject()(
_ <- aiModelDAO.findOne(request.body.aiModelId) ?~> "aiModel.notFound"
_ <- datasetService.assertValidDatasetName(request.body.newDatasetName)
_ <- datasetService.assertNewDatasetName(request.body.newDatasetName, organization._id)
_ <- datasetService.assertValidLayerNameLax(request.body.newSegmentationLayerName)
jobCommand = JobCommand.infer_with_model
boundingBox <- BoundingBox.fromLiteral(request.body.boundingBox).toFox
commandArgs = Json.obj(
@@ -177,8 +176,8 @@ class AiModelController @Inject()(
           "color_layer_name" -> request.body.colorLayerName,
           "bounding_box" -> boundingBox.toLiteral,
           "model_id" -> request.body.aiModelId,
-          "new_segmentation_layer_name" -> request.body.newSegmentationLayerName,
-          "new_dataset_name" -> request.body.newDatasetName
+          "new_dataset_name" -> request.body.newDatasetName,
+          "custom_workflow_provided_by_user" -> request.body.workflowYaml
         )
         newInferenceJob <- jobService.submitJob(jobCommand, commandArgs, request.identity, dataStore.name) ?~> "job.couldNotRunInferWithModel"
         newAiInference = AiInference(
@@ -189,7 +188,7 @@ class AiModelController @Inject()(
           _annotation = request.body.annotationId,
           boundingBox = boundingBox,
           _inferenceJob = newInferenceJob._id,
-          newSegmentationLayerName = request.body.newSegmentationLayerName,
+          newSegmentationLayerName = "segmentation",
           maskAnnotationLayerName = request.body.maskAnnotationLayerName
         )
         _ <- aiInferenceDAO.insertOne(newAiInference)
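
Per the commit message, the custom workflow is only used when the user explicitly opts in; on the backend this simply means workflowYaml arrives as None unless provided. A sketch of the corresponding frontend gating, assuming hypothetical names for the checkbox state and editor contents:

// Hypothetical frontend-side gating: the YAML from the (collapsed-by-default)
// editor is only attached to the request when the "use custom workflow"
// checkbox is enabled; otherwise the backend falls back to the default template.
function getWorkflowYamlParam(
  useCustomWorkflowEnabled: boolean,
  editorContents: string,
): string | undefined {
  return useCustomWorkflowEnabled ? editorContents : undefined;
}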
6 changes: 0 additions & 6 deletions app/controllers/JobController.scala
@@ -222,7 +222,6 @@ class JobController @Inject()(
                         datasetName: String,
                         layerName: String,
                         bbox: String,
-                        outputSegmentationLayerName: String,
                         newDatasetName: String): Action[AnyContent] =
     sil.SecuredAction.async { implicit request =>
       log(Some(slackNotificationService.noticeFailedJobRequest)) {
@@ -234,7 +233,6 @@ class JobController @Inject()(
                                                      "dataset.notFound",
                                                      datasetName) ~> NOT_FOUND
           _ <- datasetService.assertValidDatasetName(newDatasetName)
-          _ <- datasetService.assertValidLayerNameLax(outputSegmentationLayerName)
           _ <- datasetService.assertValidLayerNameLax(layerName)
           multiUser <- multiUserDAO.findOne(request.identity._multiUser)
           _ <- Fox.runIf(!multiUser.isSuperUser)(jobService.assertBoundingBoxLimits(bbox, None))
@@ -244,7 +242,6 @@ class JobController @Inject()(
             "dataset_name" -> datasetName,
             "new_dataset_name" -> newDatasetName,
             "layer_name" -> layerName,
-            "output_segmentation_layer_name" -> outputSegmentationLayerName,
             "bbox" -> bbox,
           )
           job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunNeuronInferral"
@@ -257,7 +254,6 @@ class JobController @Inject()(
                             datasetName: String,
                             layerName: String,
                             bbox: String,
-                            outputSegmentationLayerName: String,
                             newDatasetName: String): Action[AnyContent] =
     sil.SecuredAction.async { implicit request =>
       log(Some(slackNotificationService.noticeFailedJobRequest)) {
@@ -269,7 +265,6 @@ class JobController @Inject()(
                                                      "dataset.notFound",
                                                      datasetName) ~> NOT_FOUND
           _ <- datasetService.assertValidDatasetName(newDatasetName)
-          _ <- datasetService.assertValidLayerNameLax(outputSegmentationLayerName)
           _ <- datasetService.assertValidLayerNameLax(layerName)
           multiUser <- multiUserDAO.findOne(request.identity._multiUser)
           _ <- bool2Fox(multiUser.isSuperUser) ?~> "job.inferMitochondria.notAllowed.onlySuperUsers"
@@ -280,7 +275,6 @@ class JobController @Inject()(
             "dataset_name" -> datasetName,
             "new_dataset_name" -> newDatasetName,
             "layer_name" -> layerName,
-            "output_segmentation_layer_name" -> outputSegmentationLayerName,
             "bbox" -> bbox,
           )
           job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunInferMitochondria"
4 changes: 2 additions & 2 deletions conf/webknossos.latest.routes
@@ -263,8 +263,8 @@ POST /jobs/run/computeMeshFile/:organizationName/:datasetName
 POST  /jobs/run/computeSegmentIndexFile/:organizationName/:datasetName      controllers.JobController.runComputeSegmentIndexFileJob(organizationName: String, datasetName: String, layerName: String)
 POST  /jobs/run/exportTiff/:organizationName/:datasetName                   controllers.JobController.runExportTiffJob(organizationName: String, datasetName: String, bbox: String, layerName: Option[String], mag: Option[String], annotationLayerName: Option[String], annotationId: Option[String], asOmeTiff: Boolean)
 POST  /jobs/run/inferNuclei/:organizationName/:datasetName                  controllers.JobController.runInferNucleiJob(organizationName: String, datasetName: String, layerName: String, newDatasetName: String)
-POST  /jobs/run/inferNeurons/:organizationName/:datasetName                 controllers.JobController.runInferNeuronsJob(organizationName: String, datasetName: String, layerName: String, bbox: String, outputSegmentationLayerName: String, newDatasetName: String)
-POST  /jobs/run/inferMitochondria/:organizationName/:datasetName            controllers.JobController.runInferMitochondriaJob(organizationName: String, datasetName: String, layerName: String, bbox: String, outputSegmentationLayerName: String, newDatasetName: String)
+POST  /jobs/run/inferNeurons/:organizationName/:datasetName                 controllers.JobController.runInferNeuronsJob(organizationName: String, datasetName: String, layerName: String, bbox: String, newDatasetName: String)
+POST  /jobs/run/inferMitochondria/:organizationName/:datasetName            controllers.JobController.runInferMitochondriaJob(organizationName: String, datasetName: String, layerName: String, bbox: String, newDatasetName: String)
 POST  /jobs/run/alignSections/:organizationName/:datasetName                controllers.JobController.runAlignSectionsJob(organizationName: String, datasetName: String, layerName: String, newDatasetName: String)
 POST  /jobs/run/materializeVolumeAnnotation/:organizationName/:datasetName  controllers.JobController.runMaterializeVolumeAnnotationJob(organizationName: String, datasetName: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, outputSegmentationLayerName: String, mergeSegments: Boolean, volumeLayerName: Option[String])
 POST  /jobs/run/findLargestSegmentId/:organizationName/:datasetName         controllers.JobController.runFindLargestSegmentIdJob(organizationName: String, datasetName: String, layerName: String)
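
After this change, a direct call to the inferNeurons route no longer passes outputSegmentationLayerName. A rough sketch (the /api prefix, organization/dataset names, and auth handling are assumptions; the real frontend goes through the helpers in admin/api/jobs.ts):

// Hypothetical direct invocation of the slimmed-down inferNeurons route.
const params = new URLSearchParams({
  layerName: "color",
  bbox: "0,0,0,512,512,512",
  newDatasetName: "my_dataset_neurons",
});
const response = await fetch(
  `/api/jobs/run/inferNeurons/sample_organization/sample_dataset?${params}`,
  { method: "POST" },
);
const job = await response.json();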
12 changes: 1 addition & 11 deletions frontend/javascripts/admin/api/jobs.ts
@@ -185,13 +185,11 @@ export function startNeuronInferralJob(
   datasetName: string,
   layerName: string,
   bbox: Vector6,
-  outputSegmentationLayerName: string,
   newDatasetName: string,
 ): Promise<APIJob> {
   const urlParams = new URLSearchParams({
     layerName,
     bbox: bbox.join(","),
-    outputSegmentationLayerName,
     newDatasetName,
   });
   return Request.receiveJSON(
@@ -224,7 +222,6 @@ function startSegmentationAnnotationDependentJob(
   newDatasetName: string,
   annotationId: string,
   annotationType: APIAnnotationType,
-  outputSegmentationLayerName?: string,
   mergeSegments?: boolean,
 ): Promise<APIJob> {
   const requestURL = new URL(
@@ -238,9 +235,6 @@ function startSegmentationAnnotationDependentJob(
   requestURL.searchParams.append("annotationId", annotationId);
   requestURL.searchParams.append("annotationType", annotationType);
   requestURL.searchParams.append("newDatasetName", newDatasetName);
-  if (outputSegmentationLayerName != null) {
-    requestURL.searchParams.append("outputSegmentationLayerName", outputSegmentationLayerName);
-  }
   if (mergeSegments != null) {
     requestURL.searchParams.append("mergeSegments", mergeSegments.toString());
   }
@@ -255,7 +249,6 @@ export function startMaterializingVolumeAnnotationJob(
   fallbackLayerName: string,
   volumeLayerName: string | null | undefined,
   newDatasetName: string,
-  outputSegmentationLayerName: string,
   annotationId: string,
   annotationType: APIAnnotationType,
   mergeSegments: boolean,
@@ -269,7 +262,6 @@ export function startMaterializingVolumeAnnotationJob(
     newDatasetName,
     annotationId,
     annotationType,
-    outputSegmentationLayerName,
     mergeSegments,
   );
 }
@@ -279,13 +271,11 @@ export function startMitochondriaInferralJob(
   datasetName: string,
   layerName: string,
   bbox: Vector6,
-  outputSegmentationLayerName: string,
   newDatasetName: string,
 ): Promise<APIJob> {
   const urlParams = new URLSearchParams({
     layerName,
     bbox: bbox.join(","),
-    outputSegmentationLayerName,
     newDatasetName,
   });
   return Request.receiveJSON(
@@ -344,8 +334,8 @@ type RunInferenceParameters = {
   datasetName: string;
   colorLayerName: string;
   boundingBox: Vector6;
-  newSegmentationLayerName: string;
   newDatasetName: string;
+  workflowYaml?: string;
   // maskAnnotationLayerName?: string | null
 };
 
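
Callers of the updated frontend helpers drop the layer-name argument accordingly; a usage sketch with illustrative values:

// startNeuronInferralJob after this diff: no outputSegmentationLayerName.
// The backend now names the new layer itself ("segmentation" for
// custom-model inference, per AiModelController above).
const job = await startNeuronInferralJob(
  "sample_organization",
  "sample_dataset",
  "color",
  [0, 0, 0, 512, 512, 512], // Vector6 bounding box
  "sample_dataset_neurons",
);
console.log(`submitted job ${job.id}, state: ${job.state}`);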
2 changes: 1 addition & 1 deletion frontend/javascripts/admin/job/job_list_view.tsx
@@ -344,7 +344,7 @@ function JobListView() {
     return (
       <span>
         {job.state === "SUCCESS" &&
-          "The model may now be selected from the &quot;AI Analysis&quot; button when viewing a dataset."}
+          "The model may now be selected from the AI Analysis button when viewing a dataset."}
       </span>
     );
   } else {
@@ -0,0 +1,24 @@
+export default `predict:
+  task: PredictTask
+  distribution:
+    default:
+      processes: 2
+  inputs:
+    model: TO_BE_SET_BY_WORKER
+  config:
+    name: predict
+    datasource_config: TO_BE_SET_BY_WORKER
+    # your additional config keys here
+# your additional tasks here
+publish_dataset_meshes:
+  task: PublishDatasetTask
+  inputs:
+    dataset: # your dataset here
+  config:
+    name: TO_BE_SET_BY_WORKER
+    public_directory: TO_BE_SET_BY_WORKER
+    webknossos_organization: TO_BE_SET_BY_WORKER
+    use_symlinks: False
+    move_dataset_symlink_artifact: True`;
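
The TO_BE_SET_BY_WORKER placeholders are not filled in by the frontend; the template merely seeds the collapsed YAML editor, and the worker substitutes concrete values at runtime. As a rough illustration of that substitution idea (this helper is hypothetical and not part of the commit):

// Hypothetical sketch: replace each TO_BE_SET_BY_WORKER placeholder in
// order. In reality this substitution happens inside the worker.
function fillWorkerPlaceholders(template: string, values: string[]): string {
  let result = template;
  for (const value of values) {
    // String.replace with a string pattern replaces the first match only.
    result = result.replace("TO_BE_SET_BY_WORKER", value);
  }
  return result;
}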