Skip to content

Commit

Permalink
Merge pull request #35382 from hatakeyamak/PFVal_12_1
Browse files Browse the repository at this point in the history
PFVal on-the-fly operation update for CMSSW_12_1
  • Loading branch information
cmsbuild committed Sep 24, 2021
2 parents 424e2d4 + c35993a commit 17d8742
Show file tree
Hide file tree
Showing 12 changed files with 201 additions and 100 deletions.
4 changes: 2 additions & 2 deletions Validation/RecoParticleFlow/Makefile
Expand Up @@ -6,7 +6,7 @@ conf:
cd ${TMPDIR} && ${RELVALCMD} conf reco 0

dumpconf:
cd ${TMPDIR}/conf && python3 -c 'import step3; print step3.process.dumpPython()' > step3_dump.py
cd ${TMPDIR}/conf && python3 -c 'import step3; print(step3.process.dumpPython())' > step3_dump.py
cp ${TMPDIR}/conf/step3.py test/crab/
cp ${TMPDIR}/conf/step3_dump.py test/crab/

Expand All @@ -17,7 +17,7 @@ QCD_reco:

#Need to expand the CMSSW python3 configuration
QCD_dumpconf:
cd ${TMPDIR}/QCD && python3 -c 'import step3_RAW2DIGI_L1Reco_RECO_RECOSIM_EI_PAT as step3; print step3.process.dumpPython()' > step3_dump.py
cd ${TMPDIR}/QCD && python3 -c 'import step3_RAW2DIGI_L1Reco_RECO_RECOSIM_EI_PAT as step3; print(step3.process.dumpPython())' > step3_dump.py
cp ${TMPDIR}/QCD/step3_dump.py crab/

QCDPU_reco:
Expand Down
27 changes: 20 additions & 7 deletions Validation/RecoParticleFlow/README.md
Expand Up @@ -9,8 +9,8 @@ for lxplus with SLC7 (default since April 2019)

~~~
export SCRAM_ARCH=slc7_amd64_gcc900
cmsrel CMSSW_11_3_0_pre1
cd CMSSW_11_3_0_pre1
cmsrel CMSSW_12_1_0_pre3
cd CMSSW_12_1_0_pre3
cmsenv
~~~

Expand Down Expand Up @@ -86,6 +86,20 @@ In this case the URL for the directory is 'http://cern.ch/foo/plots', where 'foo
(This requires that your personal cern web page cern.ch/username is enabled)


# Running via condor

Make sure datasets.py is already parsed above and there are input file lists under ${CMSSW_BASE}/src/Validation/RecoParticleFlow/test/tmp/das_cache. This is written assuming that you are running condor jobs on CERN lxplus, although with some modifications, the setup can be used with condor on other clusters.

~~~
cd ${CMSSW_BASE}/src/Validation/RecoParticleFlow/test
voms-proxy-init -voms cms
cmsenv
mkdir -p log
condor_submit condor_QCD.jdl
~~~

The output files will appear in /eos/cms/store/group/phys_pf/PFVal/QCD. You will want to make sure you are subscribed to cms-eos-phys-pf so that you have eos write access. There are jdl files for other datasets also.


# Running via crab

Expand All @@ -111,12 +125,12 @@ Note that the datasets to run over are defined in the below script.
Modify the "samples" -list there for changing datasets to process.

~~~
python multicrab.py
python3 multicrab.py
~~~

Once the jobs are done, move the step3_inMINIAODSIM root files
from your GRID destination directory to test/tmp/QCD (etc) directory and proceed
with QCD_dqm etc.
with QCD_dqm etc.
Please note that any file matching 'step3\*MINIAODSIM\*.root' will
be included in the DQM step, so delete files you don't want to study.

Expand All @@ -131,8 +145,8 @@ Take note that the CMSSW python3 configuration for running the RECO sequence is
~~~
# For example (default for 2021):
#CONDITIONS=auto:phase1_2018_realistic ERA=Run2_2018 # for 2018 scenarios
CONDITIONS=auto:phase1_2021_realistic ERA=Run3 # for run 3
#CONDITIONS=auto:phase2_realistic ERA=Phase2C9 # for phase2
CONDITIONS=auto:phase1_2021_realistic ERA=Run3 # for run 3
#CONDITIONS=auto:phase2_realistic ERA=Phase2C9 # for phase2
#Running with 2 threads allows to use more memory on grid
NTHREADS=2 TMPDIR=tmp
Expand Down Expand Up @@ -161,4 +175,3 @@ cmsDriver.py step5 --conditions $CONDITIONS -s DQM:@pfDQM --datatier DQMIO --nTh
~~~
cmsDriver.py step6 --conditions $CONDITIONS -s HARVESTING:@pfDQM --era $ERA --filetype DQM --filein file:step5.root --fileout file:step6.root >& step6.log &
~~~

73 changes: 36 additions & 37 deletions Validation/RecoParticleFlow/test/compare.py
Expand Up @@ -25,7 +25,7 @@ def parse_sample_string(ss):

#check that all supplied files are actually ROOT files
for fi in files:
print "Trying to open DQM file {0} for sample {1}".format(fi, name)
print("Trying to open DQM file {0} for sample {1}".format(fi, name))
if not os.path.isfile(fi):
raise Exception("Could not read DQM file {0}, it does not exist".format(fi))
tf = ROOT.TFile(fi)
Expand Down Expand Up @@ -189,43 +189,42 @@ def doPFCandPlots(files, plots):


def addPlots(plotter, folder, name, section, histograms, opts, Offset=False):
folders = [folder]
folders = [folder]
#plots = [PlotGroup(name, [Plot(h, **opts) for h in histograms])]
#KH print plots
if Offset :
plots = [PlotGroup(name, [Plot(h, **opts) for h in histograms])]
plotter.append("Offset", folders, PlotFolder(*plots, loopSubFolders=False, page="offset", section=section))
elif "JetResponse" in folder :
plots = [PlotGroup(name, [Plot(h, **opts) for h in histograms])]
plotter.append("ParticleFlow/" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="pf", section=section))
for plot in plots:
plot.setProperties(ncols=3)
plot.setProperties(legendDw=-0.68)
plot.setProperties(legendDh=0.005)
plot.setProperties(legendDy=0.24)
plot.setProperties(legendDx=0.05)
elif "JetMET" in folder:
for h in histograms:
plots = [PlotGroup(h, [Plot(h, **opts)])]
for plot in plots:
plot.setProperties(legendDw=-0.5)
plot.setProperties(legendDh=0.01)
plot.setProperties(legendDy=0.24)
plot.setProperties(legendDx=0.05)
plotter.append("JetMET" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="JetMET", section=section))
if "PackedCandidates" in folder:
for h in histograms:
if ("PtMid" in h or "PtHigh" in h):
plots = [PlotGroup(h, [Plot(h, ymin = pow(10,-1), ylog = True)])]
else:
plots = [PlotGroup(h, [Plot(h, **opts)])]

for plot in plots:
plot.setProperties(legendDw=-0.5)
plot.setProperties(legendDh=0.01)
plot.setProperties(legendDy=0.24)
plot.setProperties(legendDx=0.05)
plotter.append("ParticleFlow/PackedCandidates/" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="PackedCandidates", section= section))
if Offset :
plots = [PlotGroup(name, [Plot(h, **opts) for h in histograms])]
plotter.append("Offset", folders, PlotFolder(*plots, loopSubFolders=False, page="offset", section=section))
elif "JetResponse" in folder :
plots = [PlotGroup(name, [Plot(h, **opts) for h in histograms])]
plotter.append("ParticleFlow/" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="pf", section=section))
for plot in plots:
plot.setProperties(ncols=3)
plot.setProperties(legendDw=-0.68)
plot.setProperties(legendDh=0.005)
plot.setProperties(legendDy=0.24)
plot.setProperties(legendDx=0.05)
elif "JetMET" in folder:
for h in histograms:
plots = [PlotGroup(h, [Plot(h, **opts)])]
for plot in plots:
plot.setProperties(legendDw=-0.5)
plot.setProperties(legendDh=0.01)
plot.setProperties(legendDy=0.24)
plot.setProperties(legendDx=0.05)
plotter.append("JetMET" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="JetMET", section=section))
if "PackedCandidates" in folder:
for h in histograms:
if ("PtMid" in h or "PtHigh" in h):
plots = [PlotGroup(h, [Plot(h, ymin = pow(10,-1), ylog = True)])]
else:
plots = [PlotGroup(h, [Plot(h, **opts)])]
for plot in plots:
plot.setProperties(legendDw=-0.5)
plot.setProperties(legendDh=0.01)
plot.setProperties(legendDy=0.24)
plot.setProperties(legendDx=0.05)
plotter.append("ParticleFlow/PackedCandidates/" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="PackedCandidates", section= section))


def main():
Expand Down Expand Up @@ -260,7 +259,7 @@ def main():
fullJetFolder = "DQMData/Run 1/ParticleFlow/Run summary/{0}".format(folder)
fullMETFolder = "DQMData/Run 1/JetMET/Run summary/{0}".format(folder)
fullPFCandFolder = "DQMData/Run 1/ParticleFlow/Run summary/PackedCandidates/{0}".format(folder)
print "Booking histogram group {0}={1} from folder {2}".format(name, histograms, folder)
print("Booking histogram group {0}={1} from folder {2}".format(name, histograms, folder))
if "Offset/" in folder:
opts = {'xtitle':'Default', 'ytitle':'Default'}
addPlots(plotter, fullJetFolder, name, folder, histograms, opts, True)
Expand Down
Expand Up @@ -2,33 +2,35 @@
Universe = vanilla

Executable = $ENV(CMSSW_BASE)/src/Validation/RecoParticleFlow/test/run_relval.sh
Arguments = "MinBias reco $(Process)"
Arguments = "NuGunPU reco $(Process)"

Log = log/test.$(Cluster).log
Output = log/test.out.$(Cluster).$(Process)
Error = log/test.err.$(Cluster).$(Process)
Environment = "CMSSW_BASE=$ENV(CMSSW_BASE) PERJOB=500"
Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)"

#Requirements=(TARGET.OpSysAndVer=="CentOS7")
RequestMemory = 3000
RequestCpus = 1
#6h runtime
+MaxRuntime = 21600
RequestMemory = 20000
RequestCpus = 8

#24h runtime
+MaxRuntime = 24*3600
+JobFlavour = "tomorrow"

#choose job environment using singularity
+RunAsOwner = True
+SleepSlot = True
+InteractiveUser = true
+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel6"
+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7"
+SingularityBindCVMFS = True
run_as_owner = True

use_x509userproxy = True
x509userproxy = $ENV(X509_USER_PROXY)

#transfer section
transfer_output_files = MinBias/step3.log,MinBias/step3_inMINIAODSIM.root
transfer_output_remaps = "step3.log=step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=step3_inMINIAODSIM.$(Cluster).$(Process).root"
transfer_output_files = NuGunPU/step3.log,NuGunPU/step3_inMINIAODSIM.root
transfer_output_remaps = "step3.log=/eos/cms/store/group/phys_pf/PFVal/NuGunPU/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/NuGunPU/step3_inMINIAODSIM.$(Cluster).$(Process).root"
should_transfer_files = YES
when_to_transfer_output = ON_EXIT

Queue 14
Queue 18
Expand Up @@ -7,30 +7,30 @@ Arguments = "QCD reco $(Process)"
Log = log/test.$(Cluster).log
Output = log/test.out.$(Cluster).$(Process)
Error = log/test.err.$(Cluster).$(Process)
Environment = "CMSSW_BASE=$ENV(CMSSW_BASE) PERJOB=500"
Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)"

#Requirements=(TARGET.OpSysAndVer=="CentOS7")
RequestMemory = 3000
RequestCpus = 1
request_disk = 2000MB
RequestMemory = 20000
RequestCpus = 8

#6h runtime
+MaxRuntime = 21600
#8h runtime
+MaxRuntime = 28800
+JobFlavour = "workday"

#choose job environment using singularity
+RunAsOwner = True
+SleepSlot = True
+InteractiveUser = true
+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel6"
+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7"
+SingularityBindCVMFS = True
run_as_owner = True

use_x509userproxy = True
x509userproxy = $ENV(X509_USER_PROXY)

#transfer section
transfer_output_files = QCD/step3.log,QCD/step3_inMINIAODSIM.root
transfer_output_remaps = "step3.log=step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=step3_inMINIAODSIM.$(Cluster).$(Process).root"
transfer_output_remaps = "step3.log=/eos/cms/store/group/phys_pf/PFVal/QCD/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/QCD/step3_inMINIAODSIM.$(Cluster).$(Process).root"
should_transfer_files = YES
when_to_transfer_output = ON_EXIT

Queue 25
Queue 50
Expand Up @@ -9,24 +9,27 @@ Output = log/test.out.$(Cluster).$(Process)
Error = log/test.err.$(Cluster).$(Process)
Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)"

RequestMemory = 3000
RequestCpus = 1
#6h runtime
+MaxRuntime = 12*3600
RequestMemory = 20000
RequestCpus = 8

#24h runtime
+MaxRuntime = 24*3600
+JobFlavour = "tomorrow"

#choose job environment using singularity
+RunAsOwner = True
+SleepSlot = True
+InteractiveUser = true
+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel6"
+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7"
+SingularityBindCVMFS = True
run_as_owner = True

use_x509userproxy = True
x509userproxy = $ENV(X509_USER_PROXY)

#transfer section
transfer_output_files = QCDPU/step3.log,QCDPU/step3_inMINIAODSIM.root
transfer_output_remaps = "step3.log=step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=step3_inMINIAODSIM.$(Cluster).$(Process).root"
transfer_output_remaps = "step3.log=/eos/cms/store/group/phys_pf/PFVal/QCDPU/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/QCDPU/step3_inMINIAODSIM.$(Cluster).$(Process).root"
should_transfer_files = YES
when_to_transfer_output = ON_EXIT

Expand Down
37 changes: 37 additions & 0 deletions Validation/RecoParticleFlow/test/condor_TenTauPU.jdl
@@ -0,0 +1,37 @@
#This condor submission file runs the RECO step in N jobs on a single RelVal file
Universe = vanilla

Executable = $ENV(CMSSW_BASE)/src/Validation/RecoParticleFlow/test/run_relval.sh
Arguments = "TenTauPU reco $(Process)"

Log = log/test.$(Cluster).log
Output = log/test.out.$(Cluster).$(Process)
Error = log/test.err.$(Cluster).$(Process)
Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)"

RequestMemory = 20000
RequestCpus = 8

#24h runtime
+MaxRuntime = 24*3600
+JobFlavour = "tomorrow"

#choose job environment using singularity
+RunAsOwner = True
+SleepSlot = True
+InteractiveUser = true
+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7"
+SingularityBindCVMFS = True
run_as_owner = True

use_x509userproxy = True
x509userproxy = $ENV(X509_USER_PROXY)

#transfer section
transfer_output_files = TenTauPU/step3.log,TenTauPU/step3_inMINIAODSIM.root
transfer_output_remaps = "step3.log=/eos/cms/store/group/phys_pf/PFVal/TenTauPU/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/TenTauPU/step3_inMINIAODSIM.$(Cluster).$(Process).root"
should_transfer_files = YES
when_to_transfer_output = ON_EXIT

Queue 8

Expand Up @@ -2,33 +2,35 @@
Universe = vanilla

Executable = $ENV(CMSSW_BASE)/src/Validation/RecoParticleFlow/test/run_relval.sh
Arguments = "ZMM reco $(Process)"
Arguments = "ZEEPU reco $(Process)"

Log = log/test.$(Cluster).log
Output = log/test.out.$(Cluster).$(Process)
Error = log/test.err.$(Cluster).$(Process)
Environment = "CMSSW_BASE=$ENV(CMSSW_BASE) PERJOB=500"
Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)"

#Requirements=(TARGET.OpSysAndVer=="CentOS7")
RequestMemory = 3000
RequestCpus = 1
#6h runtime
+MaxRuntime = 21600
RequestMemory = 20000
RequestCpus = 8

#24h runtime
+MaxRuntime = 24*3600
+JobFlavour = "tomorrow"

#choose job environment using singularity
+RunAsOwner = True
+SleepSlot = True
+InteractiveUser = true
+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel6"
+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7"
+SingularityBindCVMFS = True
run_as_owner = True

use_x509userproxy = True
x509userproxy = $ENV(X509_USER_PROXY)

#transfer section
transfer_output_files = ZMM/step3.log,ZMM/step3_inMINIAODSIM.root
transfer_output_remaps = "step3.log=step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=step3_inMINIAODSIM.$(Cluster).$(Process).root"
transfer_output_files = ZEEPU/step3.log,ZEEPU/step3_inMINIAODSIM.root
transfer_output_remaps = "step3.log=/eos/cms/store/group/phys_pf/PFVal/ZEEPU/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/ZEEPU/step3_inMINIAODSIM.$(Cluster).$(Process).root"
should_transfer_files = YES
when_to_transfer_output = ON_EXIT

Queue 3
Queue 9

0 comments on commit 17d8742

Please sign in to comment.