diff --git a/Validation/RecoParticleFlow/Makefile b/Validation/RecoParticleFlow/Makefile index 32accc7c01cac..1b35e106a3ee7 100644 --- a/Validation/RecoParticleFlow/Makefile +++ b/Validation/RecoParticleFlow/Makefile @@ -6,7 +6,7 @@ conf: cd ${TMPDIR} && ${RELVALCMD} conf reco 0 dumpconf: - cd ${TMPDIR}/conf && python3 -c 'import step3; print step3.process.dumpPython()' > step3_dump.py + cd ${TMPDIR}/conf && python3 -c 'import step3; print(step3.process.dumpPython())' > step3_dump.py cp ${TMPDIR}/conf/step3.py test/crab/ cp ${TMPDIR}/conf/step3_dump.py test/crab/ @@ -17,7 +17,7 @@ QCD_reco: #Need to expand the CMSSW python3 configuration QCD_dumpconf: - cd ${TMPDIR}/QCD && python3 -c 'import step3_RAW2DIGI_L1Reco_RECO_RECOSIM_EI_PAT as step3; print step3.process.dumpPython()' > step3_dump.py + cd ${TMPDIR}/QCD && python3 -c 'import step3_RAW2DIGI_L1Reco_RECO_RECOSIM_EI_PAT as step3; print(step3.process.dumpPython())' > step3_dump.py cp ${TMPDIR}/QCD/step3_dump.py crab/ QCDPU_reco: diff --git a/Validation/RecoParticleFlow/README.md b/Validation/RecoParticleFlow/README.md index 7e879e02403a1..637e08849e902 100644 --- a/Validation/RecoParticleFlow/README.md +++ b/Validation/RecoParticleFlow/README.md @@ -9,8 +9,8 @@ for lxplus with SLC7 (default since April 2019) ~~~ export SCRAM_ARCH=slc7_amd64_gcc900 -cmsrel CMSSW_11_3_0_pre1 -cd CMSSW_11_3_0_pre1 +cmsrel CMSSW_12_1_0_pre3 +cd CMSSW_12_1_0_pre3 cmsenv ~~~ @@ -86,6 +86,20 @@ In this case the URL for the directory is 'http://cern.ch/foo/plots', where 'foo (This requires that your personal cern web page cern.ch/username is enabled) +# Running via condor + +Make sure datasets.py has already been parsed as described above, so that the input file lists exist under ${CMSSW_BASE}/src/Validation/RecoParticleFlow/test/tmp/das_cache. These instructions assume that you are running the condor jobs on CERN lxplus, although with some modifications the setup can be used with the condor system of other clusters. + +~~~ +cd ${CMSSW_BASE}/src/Validation/RecoParticleFlow/test +voms-proxy-init -voms cms +cmsenv +mkdir -p log +condor_submit condor_QCD.jdl +~~~ + +The output files will appear under /eos/cms/store/group/phys_pf/PFVal/QCD. Make sure you are subscribed to cms-eos-phys-pf so that you have EOS write access. There are jdl files for the other datasets as well. + # Running via crab @@ -111,12 +125,12 @@ Note that the datasets to run over are defined in the below script. Modify the "samples" -list there for changing datasets to process. ~~~ -python multicrab.py +python3 multicrab.py ~~~ Once the jobs are done, move the step3_inMINIAODSIM root files from your GRID destination directory to test/tmp/QCD (etc) directory and proceed -with QCD_dqm etc. +with QCD_dqm etc. Please note that any file matching 'step3\*MINIAODSIM\*.root' will be included in the DQM step, so delete files you don't want to study.
@@ -131,8 +145,8 @@ Take note that the CMSSW python3 configuration for running the RECO sequence is ~~~ # For example (default for 2021): #CONDITIONS=auto:phase1_2018_realistic ERA=Run2_2018 # for 2018 scenarios -CONDITIONS=auto:phase1_2021_realistic ERA=Run3 # for run 3 -#CONDITIONS=auto:phase2_realistic ERA=Phase2C9 # for phase2 +CONDITIONS=auto:phase1_2021_realistic ERA=Run3 # for run 3 +#CONDITIONS=auto:phase2_realistic ERA=Phase2C9 # for phase2 #Running with 2 threads allows to use more memory on grid NTHREADS=2 TMPDIR=tmp @@ -161,4 +175,3 @@ cmsDriver.py step5 --conditions $CONDITIONS -s DQM:@pfDQM --datatier DQMIO --nTh ~~~ cmsDriver.py step6 --conditions $CONDITIONS -s HARVESTING:@pfDQM --era $ERA --filetype DQM --filein file:step5.root --fileout file:step6.root >& step6.log & ~~~ - diff --git a/Validation/RecoParticleFlow/test/compare.py b/Validation/RecoParticleFlow/test/compare.py index 6d394038a5615..2f21e4baef7bd 100644 --- a/Validation/RecoParticleFlow/test/compare.py +++ b/Validation/RecoParticleFlow/test/compare.py @@ -25,7 +25,7 @@ def parse_sample_string(ss): #check that all supplied files are actually ROOT files for fi in files: - print "Trying to open DQM file {0} for sample {1}".format(fi, name) + print("Trying to open DQM file {0} for sample {1}".format(fi, name)) if not os.path.isfile(fi): raise Exception("Could not read DQM file {0}, it does not exist".format(fi)) tf = ROOT.TFile(fi) @@ -189,43 +189,42 @@ def doPFCandPlots(files, plots): def addPlots(plotter, folder, name, section, histograms, opts, Offset=False): - folders = [folder] + folders = [folder] #plots = [PlotGroup(name, [Plot(h, **opts) for h in histograms])] #KH print plots - if Offset : - plots = [PlotGroup(name, [Plot(h, **opts) for h in histograms])] - plotter.append("Offset", folders, PlotFolder(*plots, loopSubFolders=False, page="offset", section=section)) - elif "JetResponse" in folder : - plots = [PlotGroup(name, [Plot(h, **opts) for h in histograms])] - plotter.append("ParticleFlow/" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="pf", section=section)) - for plot in plots: - plot.setProperties(ncols=3) - plot.setProperties(legendDw=-0.68) - plot.setProperties(legendDh=0.005) - plot.setProperties(legendDy=0.24) - plot.setProperties(legendDx=0.05) - elif "JetMET" in folder: - for h in histograms: - plots = [PlotGroup(h, [Plot(h, **opts)])] - for plot in plots: - plot.setProperties(legendDw=-0.5) - plot.setProperties(legendDh=0.01) - plot.setProperties(legendDy=0.24) - plot.setProperties(legendDx=0.05) - plotter.append("JetMET" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="JetMET", section=section)) - if "PackedCandidates" in folder: - for h in histograms: - if ("PtMid" in h or "PtHigh" in h): - plots = [PlotGroup(h, [Plot(h, ymin = pow(10,-1), ylog = True)])] - else: - plots = [PlotGroup(h, [Plot(h, **opts)])] - - for plot in plots: - plot.setProperties(legendDw=-0.5) - plot.setProperties(legendDh=0.01) - plot.setProperties(legendDy=0.24) - plot.setProperties(legendDx=0.05) - plotter.append("ParticleFlow/PackedCandidates/" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="PackedCandidates", section= section)) + if Offset : + plots = [PlotGroup(name, [Plot(h, **opts) for h in histograms])] + plotter.append("Offset", folders, PlotFolder(*plots, loopSubFolders=False, page="offset", section=section)) + elif "JetResponse" in folder : + plots = [PlotGroup(name, [Plot(h, **opts) for h in histograms])] + plotter.append("ParticleFlow/" + section, 
folders, PlotFolder(*plots, loopSubFolders=False, page="pf", section=section)) + for plot in plots: + plot.setProperties(ncols=3) + plot.setProperties(legendDw=-0.68) + plot.setProperties(legendDh=0.005) + plot.setProperties(legendDy=0.24) + plot.setProperties(legendDx=0.05) + elif "JetMET" in folder: + for h in histograms: + plots = [PlotGroup(h, [Plot(h, **opts)])] + for plot in plots: + plot.setProperties(legendDw=-0.5) + plot.setProperties(legendDh=0.01) + plot.setProperties(legendDy=0.24) + plot.setProperties(legendDx=0.05) + plotter.append("JetMET" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="JetMET", section=section)) + if "PackedCandidates" in folder: + for h in histograms: + if ("PtMid" in h or "PtHigh" in h): + plots = [PlotGroup(h, [Plot(h, ymin = pow(10,-1), ylog = True)])] + else: + plots = [PlotGroup(h, [Plot(h, **opts)])] + for plot in plots: + plot.setProperties(legendDw=-0.5) + plot.setProperties(legendDh=0.01) + plot.setProperties(legendDy=0.24) + plot.setProperties(legendDx=0.05) + plotter.append("ParticleFlow/PackedCandidates/" + section, folders, PlotFolder(*plots, loopSubFolders=False, page="PackedCandidates", section= section)) def main(): @@ -260,7 +259,7 @@ def main(): fullJetFolder = "DQMData/Run 1/ParticleFlow/Run summary/{0}".format(folder) fullMETFolder = "DQMData/Run 1/JetMET/Run summary/{0}".format(folder) fullPFCandFolder = "DQMData/Run 1/ParticleFlow/Run summary/PackedCandidates/{0}".format(folder) - print "Booking histogram group {0}={1} from folder {2}".format(name, histograms, folder) + print("Booking histogram group {0}={1} from folder {2}".format(name, histograms, folder)) if "Offset/" in folder: opts = {'xtitle':'Default', 'ytitle':'Default'} addPlots(plotter, fullJetFolder, name, folder, histograms, opts, True) diff --git a/Validation/RecoParticleFlow/test/MinBias.jdl b/Validation/RecoParticleFlow/test/condor_NuGunPU.jdl similarity index 55% rename from Validation/RecoParticleFlow/test/MinBias.jdl rename to Validation/RecoParticleFlow/test/condor_NuGunPU.jdl index db375df766e02..6fd39c3b8e668 100644 --- a/Validation/RecoParticleFlow/test/MinBias.jdl +++ b/Validation/RecoParticleFlow/test/condor_NuGunPU.jdl @@ -2,33 +2,35 @@ Universe = vanilla Executable = $ENV(CMSSW_BASE)/src/Validation/RecoParticleFlow/test/run_relval.sh -Arguments = "MinBias reco $(Process)" +Arguments = "NuGunPU reco $(Process)" Log = log/test.$(Cluster).log Output = log/test.out.$(Cluster).$(Process) Error = log/test.err.$(Cluster).$(Process) -Environment = "CMSSW_BASE=$ENV(CMSSW_BASE) PERJOB=500" +Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)" -#Requirements=(TARGET.OpSysAndVer=="CentOS7") -RequestMemory = 3000 -RequestCpus = 1 -#6h runtume -+MaxRuntime = 21600 +RequestMemory = 20000 +RequestCpus = 8 + +#24h runtume ++MaxRuntime = 24*3600 ++JobFlavour = "tomorrow" #choose job environment using singularity +RunAsOwner = True +SleepSlot = True +InteractiveUser = true -+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel6" ++SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7" +SingularityBindCVMFS = True run_as_owner = True +use_x509userproxy = True x509userproxy = $ENV(X509_USER_PROXY) #transfer section -transfer_output_files = MinBias/step3.log,MinBias/step3_inMINIAODSIM.root -transfer_output_remaps = "step3.log=step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=step3_inMINIAODSIM.$(Cluster).$(Process).root" +transfer_output_files = NuGunPU/step3.log,NuGunPU/step3_inMINIAODSIM.root +transfer_output_remaps 
= "step3.log=/eos/cms/store/group/phys_pf/PFVal/NuGunPU/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/NuGunPU/step3_inMINIAODSIM.$(Cluster).$(Process).root" should_transfer_files = YES when_to_transfer_output = ON_EXIT -Queue 14 +Queue 18 diff --git a/Validation/RecoParticleFlow/test/QCD.jdl b/Validation/RecoParticleFlow/test/condor_QCD.jdl similarity index 64% rename from Validation/RecoParticleFlow/test/QCD.jdl rename to Validation/RecoParticleFlow/test/condor_QCD.jdl index 1ecb942e24636..73d1645f0f27c 100644 --- a/Validation/RecoParticleFlow/test/QCD.jdl +++ b/Validation/RecoParticleFlow/test/condor_QCD.jdl @@ -7,30 +7,30 @@ Arguments = "QCD reco $(Process)" Log = log/test.$(Cluster).log Output = log/test.out.$(Cluster).$(Process) Error = log/test.err.$(Cluster).$(Process) -Environment = "CMSSW_BASE=$ENV(CMSSW_BASE) PERJOB=500" +Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)" -#Requirements=(TARGET.OpSysAndVer=="CentOS7") -RequestMemory = 3000 -RequestCpus = 1 -request_disk = 2000MB +RequestMemory = 20000 +RequestCpus = 8 -#6h runtume -+MaxRuntime = 21600 +#8h runtume ++MaxRuntime = 28800 ++JobFlavour = "workday" #choose job environment using singularity +RunAsOwner = True +SleepSlot = True +InteractiveUser = true -+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel6" ++SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7" +SingularityBindCVMFS = True run_as_owner = True +use_x509userproxy = True x509userproxy = $ENV(X509_USER_PROXY) #transfer section transfer_output_files = QCD/step3.log,QCD/step3_inMINIAODSIM.root -transfer_output_remaps = "step3.log=step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=step3_inMINIAODSIM.$(Cluster).$(Process).root" +transfer_output_remaps = "step3.log=/eos/cms/store/group/phys_pf/PFVal/QCD/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/QCD/step3_inMINIAODSIM.$(Cluster).$(Process).root" should_transfer_files = YES when_to_transfer_output = ON_EXIT -Queue 25 +Queue 50 diff --git a/Validation/RecoParticleFlow/test/QCDPU.jdl b/Validation/RecoParticleFlow/test/condor_QCDPU.jdl similarity index 68% rename from Validation/RecoParticleFlow/test/QCDPU.jdl rename to Validation/RecoParticleFlow/test/condor_QCDPU.jdl index 3627585b38cb6..e6b806125069a 100644 --- a/Validation/RecoParticleFlow/test/QCDPU.jdl +++ b/Validation/RecoParticleFlow/test/condor_QCDPU.jdl @@ -9,24 +9,27 @@ Output = log/test.out.$(Cluster).$(Process) Error = log/test.err.$(Cluster).$(Process) Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)" -RequestMemory = 3000 -RequestCpus = 1 -#6h runtume -+MaxRuntime = 12*3600 +RequestMemory = 20000 +RequestCpus = 8 + +#24h runtume ++MaxRuntime = 24*3600 ++JobFlavour = "tomorrow" #choose job environment using singularity +RunAsOwner = True +SleepSlot = True +InteractiveUser = true -+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel6" ++SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7" +SingularityBindCVMFS = True run_as_owner = True +use_x509userproxy = True x509userproxy = $ENV(X509_USER_PROXY) #transfer section transfer_output_files = QCDPU/step3.log,QCDPU/step3_inMINIAODSIM.root -transfer_output_remaps = "step3.log=step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=step3_inMINIAODSIM.$(Cluster).$(Process).root" +transfer_output_remaps = 
"step3.log=/eos/cms/store/group/phys_pf/PFVal/QCDPU/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/QCDPU/step3_inMINIAODSIM.$(Cluster).$(Process).root" should_transfer_files = YES when_to_transfer_output = ON_EXIT diff --git a/Validation/RecoParticleFlow/test/condor_TenTauPU.jdl b/Validation/RecoParticleFlow/test/condor_TenTauPU.jdl new file mode 100644 index 0000000000000..4efbba0f272cc --- /dev/null +++ b/Validation/RecoParticleFlow/test/condor_TenTauPU.jdl @@ -0,0 +1,37 @@ +#This condor submission file runs the RECO step in N jobs on a single RelVal file +Universe = vanilla + +Executable = $ENV(CMSSW_BASE)/src/Validation/RecoParticleFlow/test/run_relval.sh +Arguments = "TenTauPU reco $(Process)" + +Log = log/test.$(Cluster).log +Output = log/test.out.$(Cluster).$(Process) +Error = log/test.err.$(Cluster).$(Process) +Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)" + +RequestMemory = 20000 +RequestCpus = 8 + +#24h runtume ++MaxRuntime = 24*3600 ++JobFlavour = "tomorrow" + +#choose job environment using singularity ++RunAsOwner = True ++SleepSlot = True ++InteractiveUser = true ++SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7" ++SingularityBindCVMFS = True +run_as_owner = True + +use_x509userproxy = True +x509userproxy = $ENV(X509_USER_PROXY) + +#transfer section +transfer_output_files = TenTauPU/step3.log,TenTauPU/step3_inMINIAODSIM.root +transfer_output_remaps = "step3.log=/eos/cms/store/group/phys_pf/PFVal/TenTauPU/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/TenTauPU/step3_inMINIAODSIM.$(Cluster).$(Process).root" +should_transfer_files = YES +when_to_transfer_output = ON_EXIT + +Queue 8 + diff --git a/Validation/RecoParticleFlow/test/ZMM.jdl b/Validation/RecoParticleFlow/test/condor_ZEEPU.jdl similarity index 55% rename from Validation/RecoParticleFlow/test/ZMM.jdl rename to Validation/RecoParticleFlow/test/condor_ZEEPU.jdl index 51fb8ac3fa7fc..0c71b0f5d18c7 100644 --- a/Validation/RecoParticleFlow/test/ZMM.jdl +++ b/Validation/RecoParticleFlow/test/condor_ZEEPU.jdl @@ -2,33 +2,35 @@ Universe = vanilla Executable = $ENV(CMSSW_BASE)/src/Validation/RecoParticleFlow/test/run_relval.sh -Arguments = "ZMM reco $(Process)" +Arguments = "ZEEPU reco $(Process)" Log = log/test.$(Cluster).log Output = log/test.out.$(Cluster).$(Process) Error = log/test.err.$(Cluster).$(Process) -Environment = "CMSSW_BASE=$ENV(CMSSW_BASE) PERJOB=500" +Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)" -#Requirements=(TARGET.OpSysAndVer=="CentOS7") -RequestMemory = 3000 -RequestCpus = 1 -#6h runtume -+MaxRuntime = 21600 +RequestMemory = 20000 +RequestCpus = 8 + +#24h runtume ++MaxRuntime = 24*3600 ++JobFlavour = "tomorrow" #choose job environment using singularity +RunAsOwner = True +SleepSlot = True +InteractiveUser = true -+SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel6" ++SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7" +SingularityBindCVMFS = True run_as_owner = True +use_x509userproxy = True x509userproxy = $ENV(X509_USER_PROXY) #transfer section -transfer_output_files = ZMM/step3.log,ZMM/step3_inMINIAODSIM.root -transfer_output_remaps = "step3.log=step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=step3_inMINIAODSIM.$(Cluster).$(Process).root" +transfer_output_files = ZEEPU/step3.log,ZEEPU/step3_inMINIAODSIM.root +transfer_output_remaps = 
"step3.log=/eos/cms/store/group/phys_pf/PFVal/ZEEPU/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/ZEEPU/step3_inMINIAODSIM.$(Cluster).$(Process).root" should_transfer_files = YES when_to_transfer_output = ON_EXIT -Queue 3 +Queue 9 diff --git a/Validation/RecoParticleFlow/test/condor_ZMMPU.jdl b/Validation/RecoParticleFlow/test/condor_ZMMPU.jdl new file mode 100644 index 0000000000000..9df849b1fb1e8 --- /dev/null +++ b/Validation/RecoParticleFlow/test/condor_ZMMPU.jdl @@ -0,0 +1,37 @@ +#This condor submission file runs the RECO step in N jobs on a single RelVal file +Universe = vanilla + +Executable = $ENV(CMSSW_BASE)/src/Validation/RecoParticleFlow/test/run_relval.sh +Arguments = "ZMMPU reco $(Process)" + +Log = log/test.$(Cluster).log +Output = log/test.out.$(Cluster).$(Process) +Error = log/test.err.$(Cluster).$(Process) +Environment = "CMSSW_BASE=$ENV(CMSSW_BASE)" + +RequestMemory = 20000 +RequestCpus = 8 + +#24h runtume ++MaxRuntime = 24*3600 ++JobFlavour = "tomorrow" + +#choose job environment using singularity ++RunAsOwner = True ++SleepSlot = True ++InteractiveUser = true ++SingularityImage = "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7" ++SingularityBindCVMFS = True +run_as_owner = True + +use_x509userproxy = True +x509userproxy = $ENV(X509_USER_PROXY) + +#transfer section +transfer_output_files = ZMMPU/step3.log,ZMMPU/step3_inMINIAODSIM.root +transfer_output_remaps = "step3.log=/eos/cms/store/group/phys_pf/PFVal/ZMMPU/step3.$(Cluster).$(Process).log;step3_inMINIAODSIM.root=/eos/cms/store/group/phys_pf/PFVal/ZMMPU/step3_inMINIAODSIM.$(Cluster).$(Process).root" +should_transfer_files = YES +when_to_transfer_output = ON_EXIT + +Queue 18 + diff --git a/Validation/RecoParticleFlow/test/crab/multicrab.py b/Validation/RecoParticleFlow/test/crab/multicrab.py index 4cd903794b575..8fe31660ecde9 100644 --- a/Validation/RecoParticleFlow/test/crab/multicrab.py +++ b/Validation/RecoParticleFlow/test/crab/multicrab.py @@ -10,12 +10,12 @@ def submit(config): fi.write(config.pythonise_()) samples = [ - ("/RelValQCD_FlatPt_15_3000HS_14/CMSSW_11_3_0_pre1-113X_mcRun3_2021_realistic_v1-v3/GEN-SIM-DIGI-RAW", "QCD_noPU"), - ("/RelValQCD_FlatPt_15_3000HS_14/CMSSW_11_3_0_pre1-PU_113X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "QCD_PU"), -f ("/RelValZEE_14/CMSSW_11_3_0_pre1-PU_113X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "ZEE_PU"), - ("/RelValZMM_14/CMSSW_11_3_0_pre1-PU_113X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "ZMM_PU"), - ("/RelValTenTau_15_500/CMSSW_11_3_0_pre1-PU_113X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "TenTau_PU"), - ("/RelValNuGun/CMSSW_11_3_0_pre1-PU_113X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "NuGun_PU"), + ("/RelValQCD_FlatPt_15_3000HS_14/CMSSW_12_1_0_pre2-121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "QCD_noPU2"), + ("/RelValQCD_FlatPt_15_3000HS_14/CMSSW_12_1_0_pre2-PU_121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "QCD_PU"), + ("/RelValZEE_14/CMSSW_12_1_0_pre2-PU_121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "ZEE_PU"), + ("/RelValZMM_14/CMSSW_12_1_0_pre2-PU_121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "ZMM_PU"), + ("/RelValTenTau_15_500/CMSSW_12_1_0_pre2-PU_121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "TenTau_PU"), + ("/RelValNuGun/CMSSW_12_1_0_pre2-PU_121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "NuGun_PU"), ] if __name__ == "__main__": @@ -43,7 +43,7 @@ def submit(config): #conf.Data.totalUnits = 50 conf.Data.publication = False conf.Data.outputDatasetTag = 
'pfvalidation' - #conf.Data.ignoreLocality = True + conf.Data.ignoreLocality = True # Where the output files will be transmitted to conf.Site.storageSite = 'T3_US_Baylor' diff --git a/Validation/RecoParticleFlow/test/datasets.py b/Validation/RecoParticleFlow/test/datasets.py index f500cb0ce6bec..db2f01c311a34 100644 --- a/Validation/RecoParticleFlow/test/datasets.py +++ b/Validation/RecoParticleFlow/test/datasets.py @@ -129,11 +129,14 @@ def cache_das_filenames(self): #prefix = "root://xrootd-cms.infn.it//" tmpdir = "tmp" datasets = [ - Dataset("/RelValQCD_FlatPt_15_3000HS_14/CMSSW_11_3_0_pre1-113X_mcRun3_2021_realistic_v1-v3/GEN-SIM-DIGI-RAW", "QCD_noPU", prefix, None, False, tmpdir), - Dataset("/RelValQCD_FlatPt_15_3000HS_14/CMSSW_11_3_0_pre1-PU_113X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "QCD_PU", prefix, None, False, tmpdir), - Dataset("/RelValZEE_14/CMSSW_11_3_0_pre1-PU_113X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "ZEE_PU", prefix, None, False, tmpdir), - Dataset("/RelValZMM_14/CMSSW_11_3_0_pre1-PU_113X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "ZMM_PU", prefix, None, False, tmpdir), - Dataset("/RelValTenTau_15_500/CMSSW_11_3_0_pre1-PU_113X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "TenTau_PU", prefix, None, False, tmpdir), - Dataset("/RelValNuGun/CMSSW_11_3_0_pre1-PU_113X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "NuGun_PU", prefix, None, False, tmpdir)] + Dataset("/RelValQCD_FlatPt_15_3000HS_14/CMSSW_12_1_0_pre2-121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "QCD_noPU", prefix, None, False, tmpdir), + Dataset("/RelValQCD_FlatPt_15_3000HS_14/CMSSW_12_1_0_pre2-PU_121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "QCD_PU", prefix, None, False, tmpdir), + Dataset("/RelValZEE_14/CMSSW_12_1_0_pre2-PU_121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "ZEE_PU", prefix, None, False, tmpdir), + Dataset("/RelValZMM_14/CMSSW_12_1_0_pre2-PU_121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "ZMM_PU", prefix, None, False, tmpdir), + Dataset("/RelValTenTau_15_500/CMSSW_12_1_0_pre2-PU_121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "TenTau_PU", prefix, None, False, tmpdir), + Dataset("/RelValNuGun/CMSSW_12_1_0_pre2-PU_121X_mcRun3_2021_realistic_v1-v1/GEN-SIM-DIGI-RAW", "NuGun_PU", prefix, None, False, tmpdir)] for ds in datasets: ds.cache_das_filenames() + + + diff --git a/Validation/RecoParticleFlow/test/run_relval.sh b/Validation/RecoParticleFlow/test/run_relval.sh index 67eab321ea7b1..9d149ffac8768 100755 --- a/Validation/RecoParticleFlow/test/run_relval.sh +++ b/Validation/RecoParticleFlow/test/run_relval.sh @@ -48,6 +48,11 @@ source /cvmfs/cms.cern.ch/cmsset_default.sh cd $CMSSW_BASE eval `scram runtime -sh` +#define HOME if not defined. 
+if [ -z "$HOME" ]; then + export HOME=/tmp +fi + #if the _CONDOR_SCRATCH_DIR is not defined, we are not inside a condor batch job if [ -z "$_CONDOR_SCRATCH_DIR" ]; then cd $LAUNCHDIR @@ -64,13 +69,13 @@ elif [ "$1" == "QCDPU" ]; then NAME=QCDPU elif [ "$1" == "ZEEPU" ]; then INPUT_FILELIST=${CMSSW_BASE}/src/Validation/RecoParticleFlow/test/tmp/das_cache/ZEE_PU.txt - NAME=ZEE + NAME=ZEEPU elif [ "$1" == "ZMMPU" ]; then INPUT_FILELIST=${CMSSW_BASE}/src/Validation/RecoParticleFlow/test/tmp/das_cache/ZMM_PU.txt - NAME=ZMM + NAME=ZMMPU elif [ "$1" == "TenTauPU" ]; then INPUT_FILELIST=${CMSSW_BASE}/src/Validation/RecoParticleFlow/test/tmp/das_cache/TenTau_PU.txt - NAME=TenTau + NAME=TenTauPU elif [ "$1" == "NuGunPU" ]; then INPUT_FILELIST=${CMSSW_BASE}/src/Validation/RecoParticleFlow/test/tmp/das_cache/NuGun_PU.txt NAME=NuGunPU
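As a convenience, the condor workflow introduced in the README hunk above can be strung together into a single shell session. The sketch below only repeats the commands quoted in the new "Running via condor" section, plus a standard condor_q call for monitoring; the QCD sample and the /eos/cms/store/group/phys_pf/PFVal/QCD output area are the ones wired into condor_QCD.jdl, so substitute the jdl file and path for other datasets.

~~~
# Sketch of the condor submission flow described in the README (CERN lxplus assumed).
cd ${CMSSW_BASE}/src/Validation/RecoParticleFlow/test
cmsenv
voms-proxy-init -voms cms        # grid proxy; picked up through x509userproxy in the jdl files
mkdir -p log                     # the jobs' Log/Output/Error files are written here
condor_submit condor_QCD.jdl     # or condor_QCDPU.jdl, condor_ZEEPU.jdl, condor_ZMMPU.jdl, ...

condor_q                         # standard HTCondor monitoring, not specific to this package

# After the jobs finish, the remapped outputs land in the EOS area configured in the jdl.
ls /eos/cms/store/group/phys_pf/PFVal/QCD/
~~~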
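The DQM step still expects its inputs under test/tmp/&lt;sample&gt;: the crab instructions in the README say to move the step3_inMINIAODSIM files there before running QCD_dqm, and any file matching step3*MINIAODSIM*.root is picked up. A minimal sketch for collecting the condor outputs, assuming an lxplus node where /eos is mounted and that the same test/tmp convention applies to condor-produced files:

~~~
# Hypothetical collection step before the QCD_dqm target (file names follow the jdl remaps).
mkdir -p ${CMSSW_BASE}/src/Validation/RecoParticleFlow/test/tmp/QCD
cp /eos/cms/store/group/phys_pf/PFVal/QCD/step3_inMINIAODSIM.*.root \
   ${CMSSW_BASE}/src/Validation/RecoParticleFlow/test/tmp/QCD/
# Every file matching step3*MINIAODSIM*.root in this directory enters the DQM step,
# so delete any leftovers you do not want to study.
~~~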