Skip to content

Commit

Permalink
Merge pull request #76 from lsst/tickets/DM-32594
Browse files Browse the repository at this point in the history
DM-32594: Roll back BpsConfig changes that added a dependence on the inflection package.
  • Loading branch information
MichelleGower committed Nov 16, 2021
2 parents d6d76fe + 740d8c4 commit c98ffde
Show file tree
Hide file tree
Showing 6 changed files with 33 additions and 55 deletions.
12 changes: 6 additions & 6 deletions config/bps_idf.yaml
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
#PANDA plugin specific settings:
idds_server: "https://aipanda015.cern.ch:443/idds"
iddsServer: "https://aipanda015.cern.ch:443/idds"
placeholderParams: ['qgraphNodeId', 'qgraphId']
defaultPreCmdOpts: "--long-log --log-level=VERBOSE --log-file payload-log.json"

#IDF PanDA specific settings:
computeSite: LSST
payload:
s3_endpoint_url: "https://storage.googleapis.com"
payload_folder: payload
fileDistributionEndPoint: "s3://butler-us-central1-panda-dev/dc2/{payload_folder}/{uniqProcName}/"
s3EndpointUrl: "https://storage.googleapis.com"
payloadFolder: payload
fileDistributionEndPoint: "s3://butler-us-central1-panda-dev/dc2/{payloadFolder}/{uniqProcName}/"

#SLAC PanDA specific settings:
#computing_cloud: US
#computingCloud: US
#computeSite: DOMA_LSST_SLAC_TEST


Expand All @@ -30,5 +30,5 @@ runQuantumCommand: "${CTRL_MPEXEC_DIR}/bin/pipetask {runPreCmdOpts} run -b {butl


#this is a series of setup commands preceding the actual core SW execution
runner_command: 'logDir=/tmp/panda/${PANDAID}; mkdir ${logDir}; logFile=${logDir}/${REALTIME_LOGFILES}; touch ${logFile}; chmod ugo+w ${logFile}; ln -s ${logFile} ${PWD}/; ls -l ${PWD}/; docker run -v ${logFile}:/tmp/${REALTIME_LOGFILES} --network host --privileged --env AWS_ACCESS_KEY_ID=$(</credentials/AWS_ACCESS_KEY_ID) --env AWS_SECRET_ACCESS_KEY=$(</credentials/AWS_SECRET_ACCESS_KEY) --env PGPASSWORD=$(</credentials/PGPASSWORD) --env S3_ENDPOINT_URL=${S3_ENDPOINT_URL} {sw_image} /bin/bash -c "source /opt/lsst/software/stack/loadLSST.bash;cd /tmp;ls -al;setup lsst_distrib;pwd;python3 \${CTRL_BPS_DIR}/python/lsst/ctrl/bps/wms/panda/edgenode/cmd_line_decoder.py _cmd_line_ " >&2;'
runnerCommand: 'logDir=/tmp/panda/${PANDAID}; mkdir ${logDir}; logFile=${logDir}/${REALTIME_LOGFILES}; touch ${logFile}; chmod ugo+w ${logFile}; ln -s ${logFile} ${PWD}/; ls -l ${PWD}/; docker run -v ${logFile}:/tmp/${REALTIME_LOGFILES} --network host --privileged --env AWS_ACCESS_KEY_ID=$(</credentials/AWS_ACCESS_KEY_ID) --env AWS_SECRET_ACCESS_KEY=$(</credentials/AWS_SECRET_ACCESS_KEY) --env PGPASSWORD=$(</credentials/PGPASSWORD) --env S3_ENDPOINT_URL=${S3_ENDPOINT_URL} {sw_image} /bin/bash -c "source /opt/lsst/software/stack/loadLSST.bash;cd /tmp;ls -al;setup lsst_distrib;pwd;python3 \${CTRL_BPS_DIR}/python/lsst/ctrl/bps/wms/panda/edgenode/cmd_line_decoder.py _cmd_line_ " >&2;'
wmsServiceClass: lsst.ctrl.bps.wms.panda.panda_service.PanDAService
3 changes: 3 additions & 0 deletions doc/changes/DM-32594.other.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
Rolled back changes in BpsConfig that were added for flexibility when looking up config values
(e.g., snake case keys will no longer match camel case keys nor will either match lower case keys).
This also removed the dependence on the third-party inflection package.
47 changes: 10 additions & 37 deletions python/lsst/ctrl/bps/bps_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@
import string
import re
from importlib.resources import path as resources_path
import inflection

from lsst.daf.butler.core.config import Config

Expand Down Expand Up @@ -161,38 +160,6 @@ def __contains__(self, name):
found, _ = self.search(name, {})
return found

@staticmethod
def _search_casing(sect, key):
# Until have more robust handling of key casing at config creation
# time, try checking here for different key casing.
found = False
value = ""

_LOG.debug("_search_casing: sect=%s key=%s", sect, key)
if Config.__contains__(sect, key):
found = True
value = Config.__getitem__(sect, key)
elif '_' in key:
newkey = inflection.camelize(key, False)
_LOG.debug("_search_casing: newkey=%s", newkey)
if Config.__contains__(sect, newkey):
found = True
value = Config.__getitem__(sect, newkey)
else: # try converting camel to snake
newkey = inflection.underscore(key)
_LOG.debug("_search_casing: newkey=%s", newkey)
if Config.__contains__(sect, newkey):
found = True
value = Config.__getitem__(sect, newkey)
else: # Try all lower case
newkey = key.lower()
_LOG.debug("_search_casing: newkey=%s", newkey)
if Config.__contains__(sect, newkey):
found = True
value = Config.__getitem__(sect, newkey)

return found, value

def search(self, key, opt=None):
"""Search for key using given opt following hierarchy rules.
Expand Down Expand Up @@ -281,15 +248,21 @@ def search(self, key, opt=None):
if Config.__contains__(search_sect, currkey):
search_sect = Config.__getitem__(search_sect, currkey)

found, value = self._search_casing(search_sect, key)
if found:
_LOG.debug("%s %s", key, search_sect)
if Config.__contains__(search_sect, key):
found = True
value = Config.__getitem__(search_sect, key)
break
else:
_LOG.debug("Missing search section '%s' while searching for '%s'", sect, key)

# lastly check root values
if not found:
_LOG.debug("Searching root section for key '%s'", key)
found, value = self._search_casing(self, key)
_LOG.debug(" root found=%s, value='%s'", found, value)
if Config.__contains__(self, key):
found = True
value = Config.__getitem__(self, key)
_LOG.debug("root value='%s'", value)

if not found and "default" in opt:
value = opt["default"]
Expand Down
4 changes: 3 additions & 1 deletion python/lsst/ctrl/bps/transform.py
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,9 @@ def _get_job_values(config, search_opt, cmd_line_key):
"""
job_values = {}
for attr in _ATTRS_ALL:
found, value = config.search(attr, opt=search_opt)
# Variable names in yaml are camel case instead of snake case.
yaml_name = re.sub(r"_(\S)", lambda match: match.group(1).upper(), attr)
found, value = config.search(yaml_name, opt=search_opt)
if found:
job_values[attr] = value
else:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,14 @@ pipelineYaml: "/sdf/home/j/jchiang/PanDA_testing/pipelines/cpBias.yaml"
#USER qgraphFile: "/path/to/existing/file.qgraph"

#PANDA plugin specific settings:
idds_server: "https://aipanda015.cern.ch:443/idds"
iddsServer: "https://aipanda015.cern.ch:443/idds"
placeholderParams: ['qgraphNodeId', 'qgraphId']

#IDF PanDA specific settings:
#computing_cloud: LSST
#computingCloud: LSST

#SLAC PanDA specific settings:
computing_cloud: US
computingCloud: US
computeSite: DOMA_LSST_SLAC_TEST


Expand Down Expand Up @@ -41,9 +41,9 @@ payload:
#USER inCollection: HSC/calib,HSC/raw/all,refcats
#USER dataQuery: exposure=903342 AND detector=10

payload_folder: payload
fileDistributionEndPoint: "/sdf/group/lsst/software/IandT/tmp/{payload_folder}/{uniqProcName}/"
runner_command: 'unset PYTHONPATH;export LSST_DB_AUTH=/sdf/home/l/lsstsvc1/.lsst/db-auth.yaml;source /sdf/group/lsst/software/IandT/stack-lsst_distrib-d_2021_08_03/loadLSST.bash;cd ${PILOT_HOME};pwd;ls -a;setup lsst_distrib;env;python3 ${CTRL_BPS_DIR}/python/lsst/ctrl/bps/wms/panda/edgenode/cmd_line_decoder.py _cmd_line_ '
payloadFolder: payload
fileDistributionEndPoint: "/sdf/group/lsst/software/IandT/tmp/{payloadFolder}/{uniqProcName}/"
runnerCommand: 'unset PYTHONPATH;export LSST_DB_AUTH=/sdf/home/l/lsstsvc1/.lsst/db-auth.yaml;source /sdf/group/lsst/software/IandT/stack-lsst_distrib-d_2021_08_03/loadLSST.bash;cd ${PILOT_HOME};pwd;ls -a;setup lsst_distrib;env;python3 ${CTRL_BPS_DIR}/python/lsst/ctrl/bps/wms/panda/edgenode/cmd_line_decoder.py _cmd_line_ '


pipetask:
Expand All @@ -66,7 +66,7 @@ executionButler:

whenMerge: "ALWAYS"
implementation: JOB # JOB, WORKFLOW
concurrency_limit: db_limit
concurrencyLimit: db_limit
command1: "${DAF_BUTLER_DIR}/bin/butler --log-level=VERBOSE transfer-datasets {executionButlerDir} {butlerConfig} --collections {outputRun} --register-dataset-types"
command2: "${DAF_BUTLER_DIR}/bin/butler collection-chain {butlerConfig} {output} {outputRun} --mode=prepend"

Expand All @@ -81,7 +81,7 @@ createQuantumGraph: '${CTRL_MPEXEC_DIR}/bin/pipetask qgraph -d "{dataQuery}" -b
runQuantumCommand: "${CTRL_MPEXEC_DIR}/bin/pipetask --long-log run -b {butlerConfig} --output-run {outputRun} --qgraph {fileDistributionEndPoint}/{qgraphFile} --qgraph-id {qgraphId} --qgraph-node-id {qgraphNodeId} --skip-init-writes --extend-run --clobber-outputs --skip-existing"

#createQuantumGraph: '<ENV:CTRL_MPEXEC_DIR>/bin/pipetask qgraph -d "{dataQuery}" -b {butlerConfig} -i {inCollection} --instrument {instrument} -p {pipelineYaml} -q {qgraphFile} --qgraph-dot {qgraphFile}.dot'
#runQuantumCommand: 'export LSST_DB_AUTH=/sdf/home/l/lsstsvc1/.lsst/db-auth.yaml;<ENV:CTRL_MPEXEC_DIR>/bin/pipetask --long-log run -b {butlerConfig} -i {inCollection} --output-run {outputRun} --extend-run --skip-init-writes --qgraph {fileDistributionEndPoint}/{payload_folder}/{uniqProcName}/<FILE:runQgraphFile> --qgraph-id {qgraphId} --qgraph-node-id {qgraphNodeId} --clobber-outputs --skip-existing --no-versions'
#runQuantumCommand: 'export LSST_DB_AUTH=/sdf/home/l/lsstsvc1/.lsst/db-auth.yaml;<ENV:CTRL_MPEXEC_DIR>/bin/pipetask --long-log run -b {butlerConfig} -i {inCollection} --output-run {outputRun} --extend-run --skip-init-writes --qgraph {fileDistributionEndPoint}/{payloadFolder}/{uniqProcName}/<FILE:runQgraphFile> --qgraph-id {qgraphId} --qgraph-node-id {qgraphNodeId} --clobber-outputs --skip-existing --no-versions'


requestMemory: 2048
Expand Down
6 changes: 3 additions & 3 deletions python/lsst/ctrl/bps/wms/panda/panda_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,8 +104,8 @@ def add_decoder_prefix(self, cmd_line, distribution_path, files):
"""

cmdline_hex = self.convert_exec_string_to_hex(cmd_line)
_, decoder_prefix = self.config.search("runner_command", opt={"replaceEnvVars": False,
"expandEnvVars": False})
_, decoder_prefix = self.config.search("runnerCommand", opt={"replaceEnvVars": False,
"expandEnvVars": False})
decoder_prefix = decoder_prefix.replace("_cmd_line_", str(cmdline_hex) + " ${IN/L} "
+ distribution_path + " "
+ "+".join(f'{k}:{v}' for k, v in files[0].items())
Expand Down Expand Up @@ -170,7 +170,7 @@ def submit(self, workflow):
'workflow': idds_client_workflow}
}
c = pandaclient.idds_api.get_api(idds_utils.json_dumps,
idds_host=self.config.get('idds_server'), compress=True)
idds_host=self.config.get('iddsServer'), compress=True)
request_id = c.add_request(**idds_request)
_LOG.info("Submitted into iDDs with request id=%i", request_id)
workflow.run_id = request_id
Expand Down

0 comments on commit c98ffde

Please sign in to comment.