add clustering yaml files and update bps templates
Kenneth R. Herner committed Jun 5, 2023
1 parent 89b0a3d commit d88c0eb
Showing 7 changed files with 154 additions and 12 deletions.
22 changes: 16 additions & 6 deletions bps/bps_ApPipe.yaml
@@ -7,11 +7,17 @@ project: ApPipe
campaign: DM-xxxxx

submitPath: ${PWD}/bps/{outputRun}
computeSite: ncsapool
computeSite:
site:
  s3df:
    profile:
      condor:
        +Walltime: 43200
# Memory allocated for each quantum, in MB (if unspecified, default is 2048)
requestMemory: 4096
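# Individual tasks can be given larger allocations under the pipetask section
# below; as a sketch only (mirroring the fakes template in this commit):
# pipetask:
#   subtractImages:
#     requestMemory: 8192
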
includeConfigs:
- ${CTRL_BPS_DIR}/python/lsst/ctrl/bps/etc/bps_defaults.yaml
- ${AP_PIPE_DIR}/bps/clustering/clustering_ApPipe.yaml
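# includeConfigs pulls the settings from each listed YAML file into this config;
# the clustering file listed here is new in this commit.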

# UPDATE THIS set of arguments to mirror the ones you use with command-line pipetask run
payload:
@@ -33,11 +39,15 @@ pipetask:
# Set the default walltime as appropriate for what you are running (12 hours below).

# site:
#   ncsapool:
#     profile:
#       condor:
#         +Walltime: 43200

#   s3df:
#     profile:
#       condor:
#         +Walltime: 43200

# Set the appropriate wms service class for the batch system
# you're using (HTCondor, Parsl, Slurm, triple Slurm, etc.)
# See https://developer.lsst.io/usdf/batch.html for details of doing this at S3DF.
wmsServiceClass: lsst.ctrl.bps.htcondor.HTCondorService
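
# To use a different BPS plugin, point wmsServiceClass at that plugin's service
# class instead, e.g. (assuming the ctrl_bps_parsl plugin is installed; class
# path taken from that plugin):
# wmsServiceClass: lsst.ctrl.bps.parsl.ParslService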

# Extra options for qgraph generation; will skip existing quanta in the output collection
# Note that --skip-existing-in will not work the first time you run a workflow.
17 changes: 13 additions & 4 deletions bps/bps_ApPipeMultiTractFakes.yaml
@@ -7,12 +7,18 @@ project: ApPipeMultiTractFakes
campaign: DM-xxxxx

submitPath: ${PWD}/bps/{outputRun}
computeSite: ncsapool
computeSite:
site:
  s3df:
    profile:
      condor:
        +Walltime: 43200
# Memory allocated for each quantum, in MB (if unspecified, default is 2048)
requestMemory: 4096
includeConfigs:
- ${CTRL_BPS_DIR}/python/lsst/ctrl/bps/etc/bps_defaults.yaml

- ${AP_PIPE_DIR}/bps/clustering/clustering_ApPipeWithFakes.yaml

# UPDATE THIS set of arguments to mirror the ones you use with command-line pipetask run
payload:
payloadName: DM-xxxxx-example
@@ -25,18 +31,21 @@ pipetask:
  subtractImages:
    requestMemory: 8192


# The default BPS walltime is on the order of 3 days.
# That's much more than the vast majority of jobs need,
# and it can cause your jobs not to run if you're too close to a maintenance window.
# Set the default walltime as appropriate for what you are running (12 hours below).

# site:
#   ncsapool:
#   s3df:
#     profile:
#       condor:
#         +Walltime: 43200

# Set the appropriate wms service class for the batch system
# you're using (HTCondor, Parsl, Slurm, triple Slurm, etc.)
# See https://developer.lsst.io/usdf/batch.html for details of doing this at S3DF.
wmsServiceClass: lsst.ctrl.bps.htcondor.HTCondorService

# Extra options for qgraph generation; will skip existing quanta in the output collection
# Note that --skip-existing-in will not work the first time you run a workflow.
14 changes: 12 additions & 2 deletions bps/bps_ApTemplate.yaml
@@ -9,16 +9,21 @@ project: ApTemplate
campaign: DM-xxxxx

submitPath: ${PWD}/bps/{outputRun}
computeSite: ncsapool
computeSite:
# The default BPS walltime is on the order of 3 days.
# That's much more than the vast majority of jobs need,
# and it can cause your jobs not to run if you're too close to a maintenance window.
# Set the default walltime as appropriate for what you are running (12 hours below).
site:
  ncsapool:
  s3df:
    profile:
      condor:
        +Walltime: 43200
includeConfigs:
- ${AP_PIPE_DIR}/bps/clustering/clustering_ApTemplate.yaml
# if you are running on data that needs skyCorr, such as HSC, comment out the previous line
# and use this one instead:
# - ${AP_PIPE_DIR}/bps/clustering/clustering_HSC_ApTemplate.yaml

# Memory allocated for each quantum, in MBs; can be overridden on a per-task basis.
# The bps default is 2048 MB, the same as this example, but it's kept here as a reminder.
@@ -44,6 +49,11 @@ pipetask:
  makeWarp:
    requestMemory: 8192

# Set the appropriate wms service class for the batch system
# you're using (HTCondor, Parsl, Slurm, triple Slurm, etc.)
# See https://developer.lsst.io/usdf/batch.html for details of doing this at S3DF.
wmsServiceClass: lsst.ctrl.bps.htcondor.HTCondorService

# Extra options for qgraph generation; will skip existing quanta in the output collection
# Note that --skip-existing-in will not work the first time you run a workflow.
extraQgraphOptions: "--skip-existing-in {output}"
28 changes: 28 additions & 0 deletions bps/clustering/clustering_ApPipe.yaml
@@ -0,0 +1,28 @@
# This is a prescription for quantum clustering with BPS, suitable for any
# concrete pipeline based on the AP pipeline. Note that there are separate
# example files for pipelines with and without fakes.
#
# Use it by adding:
#
# includeConfigs:
# - ${AP_PIPE_DIR}/bps/clustering/clustering_ApPipe.yaml
#
# (with no outer indentation) to your BPS config file. If you are running
# fakes, use instead:
#
# includeConfigs:
# - ${AP_PIPE_DIR}/bps/clustering/clustering_ApPipeWithFakes.yaml
#
# again with no outer indentation.
#

clusterAlgorithm: lsst.ctrl.bps.quantum_clustering_funcs.dimension_clustering
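# dimension_clustering groups the quanta of the pipetasks listed in each cluster
# below that share the same values of the listed dimensions into a single job.
# equalDimensions: visit:exposure treats the visit and exposure dimensions as
# interchangeable, so exposure-based quanta (e.g. isr) can be grouped with
# visit-based ones (e.g. calibrate).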
cluster:
  singleFrame:
    pipetasks: isr,characterizeImage,calibrate
    dimensions: visit,detector
    equalDimensions: visit:exposure
  diffim:
    pipetasks: retrieveTemplate,subtractImages,detectAndMeasure,transformDiaSrcCat,diaPipe
    dimensions: visit,detector
    equalDimensions: visit:exposure
31 changes: 31 additions & 0 deletions bps/clustering/clustering_ApPipeWithFakes.yaml
@@ -0,0 +1,31 @@
# This is a prescription for quantum clustering with BPS, suitable for any
# concrete pipeline based on the AP pipeline. Note that there are separate
# example files for pipelines with and without fakes.
#
# Use it by adding:
#
# includeConfigs:
# - ${AP_PIPE_DIR}/bps/clustering/clustering_ApPipe.yaml
#
# (with no outer indentation) to your BPS config file. If you are running
# fakes, use instead:
#
# includeConfigs:
# - ${AP_PIPE_DIR}/bps/clustering/clustering_ApPipeWithFakes.yaml
#
# again with no outer indentation.
#

clusterAlgorithm: lsst.ctrl.bps.quantum_clustering_funcs.dimension_clustering
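# Relative to clustering_ApPipe.yaml, this variant adds a tract/patch-level
# coaddFakes cluster and uses the *WithFakes task names in the diffim cluster.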
cluster:
  singleFrame:
    pipetasks: isr,characterizeImage,calibrate
    dimensions: visit,detector
    equalDimensions: visit:exposure
  coaddFakes:
    pipetasks: coaddFakes
    dimensions: tract,patch
  diffim:
    pipetasks: processVisitFakes,retrieveTemplateWithFakes,subtractImagesWithFakes,detectAndMeasureWithFakes,transformDiaSrcCatWithFakes,diaPipe,fakesMatch
    dimensions: visit,detector
    equalDimensions: visit:exposure
26 changes: 26 additions & 0 deletions bps/clustering/clustering_ApTemplate.yaml
@@ -0,0 +1,26 @@
# This is a prescription for quantum clustering with BPS, suitable for any
# concrete pipeline based on the AP template pipeline that does not run SkyCorr.
#
# Use it by adding:
#
# includeConfigs:
# - ${AP_PIPE_DIR}/bps/clustering/clustering_ApTemplate.yaml
#
# (with no outer indentation) to your BPS config file.
#

clusterAlgorithm: lsst.ctrl.bps.quantum_clustering_funcs.dimension_clustering
cluster:
  singleFrame:
    pipetasks: isr,characterizeImage,calibrate
    dimensions: visit,detector
    equalDimensions: visit:exposure
  consolidate:
    pipetasks: consolidateVisitSummary
    dimensions: visit
  makeWarp:
    pipetasks: makeWarp
    dimensions: tract,patch,visit
  selectAndAssemble:
    pipetasks: selectGoodSeeingVisits,assembleCoadd
    dimensions: band,tract,patch
28 changes: 28 additions & 0 deletions bps/clustering/clustering_HSC_ApTemplate.yaml
@@ -0,0 +1,28 @@
# This is a prescription for quantum clustering with BPS, suitable for any
# concrete pipeline based on the AP template pipeline that includes SkyCorr,
# such as when running on HSC data.
#
# Use it by adding:
#
# includeConfigs:
# - ${AP_PIPE_DIR}/bps/clustering/clustering_HSC_ApTemplate.yaml
#
# (with no outer indentation) to your BPS config file.
#

clusterAlgorithm: lsst.ctrl.bps.quantum_clustering_funcs.dimension_clustering
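# The only substantive difference from clustering_ApTemplate.yaml is the
# corrAndConsolidate cluster, which runs skyCorr alongside consolidateVisitSummary
# in the per-visit grouping.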
cluster:
  singleFrame:
    pipetasks: isr,characterizeImage,calibrate
    dimensions: visit,detector
    equalDimensions: visit:exposure
  corrAndConsolidate:
    pipetasks: skyCorr,consolidateVisitSummary
    dimensions: visit
  makeWarp:
    pipetasks: makeWarp
    dimensions: tract,patch,visit
  selectAndAssemble:
    pipetasks: selectGoodSeeingVisits,assembleCoadd
    dimensions: band,tract,patch
