Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

New batch scripts #3070

Merged
merged 1 commit into from Apr 1, 2014
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
69 changes: 49 additions & 20 deletions L1Trigger/TrackFindingAM/test/batch/AMPR.csh
Expand Up @@ -96,22 +96,49 @@ lfc-mkdir $OUTDIR

# We loop over the data directory in order to find all the files to process

foreach l (`lcg-ls $INDIR_GRID | cut -d/ -f15`)

@ ninput = 0

foreach ll (`lcg-ls $INDIR_GRID | grep EDM`)

set l = `basename $ll`

@ i = 0
@ j = $NPFILE

@ ninput += 1

# Uncomment this only if you want to limit the number of input files to deal with
# if ($ninput > 10) then
# continue
# endif

echo 'Working with file '$l

# First look if the file has been processed

set OUTF = `echo $l | cut -d. -f1`"_with_AMPR.root"
set OUTE = `echo $l | cut -d. -f1`"_with_FIT.root"
set OUTD = `echo $l | cut -d. -f1`"_extr.root"

set deale = `lcg-ls $OUTDIR_GRID/${OUTE} | wc -l`

if ($deale != "0") then
continue
endif

set dealf = `lcg-ls $OUTDIR_GRID/${OUTF} | wc -l`

if ($dealf != "0") then
rm final_job_${MATTER}.sh
echo "The file "$OUT" has been succesfully processed..."
echo "Time to process the trackfit on file "$OUTDIR_GRID/$OUT"_with_AMPR.root"
rm -f final_job_${OUTF}.sh

echo "#\!/bin/bash" > fit_job_${OUTF}.sh
echo "source $PACKDIR/batch/PR_processor.sh FIT $OUTDIR_XROOT/${OUTF} $OUTE $OUTD $NTOT $OUTDIR_GRID $RELEASEDIR $GTAG" >> fit_job_${OUTF}.sh
chmod 755 fit_job_${OUTF}.sh

if (${6} == "BATCH") then
bsub -q 1nd -e /dev/null -o /tmp/${LOGNAME}_out.txt fit_job_${OUTF}.sh
endif

continue
endif

Expand All @@ -132,11 +159,13 @@ foreach l (`lcg-ls $INDIR_GRID | cut -d/ -f15`)

# Check if the file has already been processed

set OUTM = `echo $l | cut -d. -f1`_${i}_${j}
set dealm = `lcg-ls $OUTDIR_GRID/MERGED_$OUTM.root | wc -l`
set OUTM = `echo $l | cut -d. -f1`
set dealm = `lcg-ls $OUTDIR_GRID/MERGED_${OUTM}_${i}_${j}.root | wc -l`

#echo $OUTDIR_GRID/MERGED_${OUTM}_${i}_${j}.root

if ($dealm != "0") then
rm merge_job_${MATTER}_${i}_${j}.sh
rm -f merge_job_${OUTM}_${i}_${j}.sh
@ processed += 1
@ i += $NPFILE
@ j += $NPFILE
Expand All @@ -154,7 +183,7 @@ foreach l (`lcg-ls $INDIR_GRID | cut -d/ -f15`)
set deal = `lcg-ls $OUTDIR_GRID/$OUTS1.root | wc -l`

if ($deal != "0") then # This process was ran
rm fpr_job_$OUTS1.sh
rm -f fpr_job_$OUTS1.sh
@ secdone += 1
@ sec += 1
continue
Expand All @@ -171,7 +200,7 @@ foreach l (`lcg-ls $INDIR_GRID | cut -d/ -f15`)
chmod 755 fpr_job_${OUTS1}.sh

if (${6} == "BATCH") then
bsub -q 2nd -e /dev/null -o /tmp/${LOGNAME}_out.txt fpr_job_${OUTS1}.sh
bsub -q 1nd -e /dev/null -o /tmp/${LOGNAME}_out.txt fpr_job_${OUTS1}.sh
endif
endif
endif
Expand All @@ -191,18 +220,18 @@ foreach l (`lcg-ls $INDIR_GRID | cut -d/ -f15`)
# If not process the file
if ($dealm == "0") then

set running = `\ls merge_job_${MATTER}_${i}_${j}.sh | wc -l`
set running = `\ls merge_job_${OUTM}_${i}_${j}.sh | wc -l`

if ($running == "0") then

echo 'Launching the merging for serie '${i}_${j}' in directory '$OUTDIR_GRID

echo "#\!/bin/bash" > merge_job_${MATTER}_${i}_${j}.sh
echo "source $PACKDIR/batch/PR_processor.sh MERGE ${i}_${j}.root $OUTDIR_GRID $OUTDIR_XROOT $OUTM.root $RELEASEDIR $GTAG" >> merge_job_${MATTER}_${i}_${j}.sh
chmod 755 merge_job_${MATTER}_${i}_${j}.sh
echo "#\!/bin/bash" > merge_job_${OUTM}_${i}_${j}.sh
echo "source $PACKDIR/batch/PR_processor.sh MERGE ${i}_${j}.root $OUTDIR_GRID $OUTDIR_XROOT ${OUTM}_ $RELEASEDIR $GTAG" >> merge_job_${OUTM}_${i}_${j}.sh
chmod 755 merge_job_${OUTM}_${i}_${j}.sh

if (${6} == "BATCH") then
bsub -q 8nh -e /dev/null -o /tmp/${LOGNAME}_out.txt merge_job_${MATTER}_${i}_${j}.sh
bsub -q 8nh -e /dev/null -o /tmp/${LOGNAME}_out.txt merge_job_${OUTM}_${i}_${j}.sh
endif
endif
endif
Expand All @@ -226,18 +255,18 @@ foreach l (`lcg-ls $INDIR_GRID | cut -d/ -f15`)
# If not process the file
if ($dealf == "0") then

set running = `\ls final_job_${MATTER}.sh | wc -l`
set running = `\ls final_job_${OUTF}.sh | wc -l`

if ($running == "0") then

echo 'Launching the final merging for file '$OUTF' in directory '$OUTDIR_GRID

echo "#\!/bin/bash" > final_job_${MATTER}.sh
echo "source $PACKDIR/batch/PR_processor.sh FINAL MERGED $OUTDIR_GRID $OUTDIR_XROOT $OUTF $RELEASEDIR" >> final_job_${MATTER}.sh
chmod 755 final_job_${MATTER}.sh
echo "#\!/bin/bash" > final_job_${OUTF}.sh
echo "source $PACKDIR/batch/PR_processor.sh FINAL MERGED_${OUTM}_ $OUTDIR_GRID $OUTDIR_XROOT $OUTF $RELEASEDIR" >> final_job_${OUTF}.sh
chmod 755 final_job_${OUTF}.sh

if (${6} == "BATCH") then
bsub -q 1nh -e /dev/null -o /tmp/${LOGNAME}_out.txt final_job_${MATTER}.sh
bsub -q 1nh -e /dev/null -o /tmp/${LOGNAME}_out.txt final_job_${OUTF}.sh
endif
endif
endif
Expand Down
89 changes: 80 additions & 9 deletions L1Trigger/TrackFindingAM/test/batch/PR_processor.sh
Expand Up @@ -76,12 +76,12 @@ fi

if [ ${1} = "MERGE" ]; then

TAG=${2} # The tag of the files we merge (***_START_STOP.root)
INPUTDIR=${3} # Input/Output dirs (lcg friendly)
INPUTROOTDIR=${4} # Input/Output dirs (ROOT friendly)
OUTPUTFILE="MERGED_"${5} # Name of the output file
CMSSW_PROJECT_SRC=${6} # The CMSSW project release dir
GT=${7} # The global tag
TAG=${2} # The tag of the files we merge (***_START_STOP.root)
INPUTDIR=${3} # Input/Output dirs (lcg friendly)
INPUTROOTDIR=${4} # Input/Output dirs (ROOT friendly)
OUTPUTFILE="MERGED_"${5}$TAG # Name of the output file
CMSSW_PROJECT_SRC=${6} # The CMSSW project release dir
GT=${7} # The global tag

#
# Setting up environment variables
Expand All @@ -101,9 +101,11 @@ if [ ${1} = "MERGE" ]; then

compteur=0

for l in `lcg-ls $INPUTDIR | cut -d/ -f14 | grep $TAG`
for ll in `lcg-ls $INPUTDIR | grep $TAG | grep ${5}`
do

l=`basename $ll`

echo $l

echo "cms.InputTag(\"TTPatternsFromStub\", \"AML1Patternsb"${compteur}"\")" >> temp
Expand Down Expand Up @@ -196,17 +198,31 @@ if [ ${1} = "FINAL" ]; then

rm list.txt

for l in `lcg-ls $INPUTDIR | cut -d/ -f14 | grep $TAG`
nfiles=`lcg-ls $INPUTDIR | grep $TAG | wc -l`

for ll in `lcg-ls $INPUTDIR | grep $TAG`
do

l=`basename $ll`
echo $l
echo "$INPUTROOTDIR/$l" >> list.txt

if [ ${nfiles} = "1" ]; then

lcg-cp $INPUTDIR/$l file://$TOP/$l
cp $l $OUTPUTFILE

fi

done

# Do the merging (this one is simple)

edmCopyPickMerge inputFiles_load=list.txt outputFile=$OUTPUTFILE
if [ ${nfiles} != "1" ]; then

edmCopyPickMerge inputFiles_load=list.txt outputFile=$OUTPUTFILE

fi

# Recover the data
#
Expand All @@ -217,3 +233,58 @@ if [ ${1} = "FINAL" ]; then
lcg-cp file://$TOP/$OUTPUTFILE $OUTPUTFULL

fi

#
# Case 4: Fit and extraction
#
# When the ***_with_AMPR.root files have been processed
#

if [ "${1}" = "FIT" ]; then

  # Case 4: run the track fit + extraction step on one *_with_AMPR.root file.
  # Tweaks a copy of AMFIT_base.py via sed, runs cmsRun on it, then ships the
  # fitted and extracted ROOT files back to the grid output directory.

  echo "Doing the fit"

  INPUT=${2}             # The input xrootd file name and address
  OUTPUT=${3}            # Output file name
  OUTPUTE=${4}           # Output extracted file name
  NEVT=${5}              # Number of events to process
  OUTDIR=${6}            # Output directory (lcg friendly); original comment
                         # ("first event to process") was a copy/paste error
  CMSSW_PROJECT_SRC=${7} # The CMSSW project release dir
  GT=${8}                # The global tag

  #
  # Setting up environment variables
  #

  # Quote and check the cd: if it failed silently, cmsRun and the copies
  # below would run against the wrong directory.
  cd "$CMSSW_PROJECT_SRC" || exit 1
  export SCRAM_ARCH=slc5_amd64_gcc472
  eval $(scramv1 runtime -sh)
  voms-proxy-info

  cd "/tmp/$USER" || exit 1
  TOP=$PWD

  #
  # And we tweak the python generation script according to our needs
  #

  cd "$TOP" || exit 1
  cp "$CMSSW_PROJECT_SRC/src/L1Trigger/TrackFindingAM/test/batch/base/AMFIT_base.py" BH_dummy.py

  # Finally the script is modified according to the requests.
  # NOTE(review): $NEVT/$INPUT/$OUTPUT/$GT are interpolated directly into the
  # sed programs; values containing the delimiter ('/' or '#') would break them.

  sed "s/NEVTS/$NEVT/" -i BH_dummy.py
  sed "s#INPUTFILENAME#$INPUT#" -i BH_dummy.py
  sed "s#OUTPUTFILENAME#$OUTPUT#" -i BH_dummy.py
  sed "s/MYGLOBALTAG/$GT/" -i BH_dummy.py

  cmsRun BH_dummy.py -j4

  # Recover the data
  #

  ls -l
  lcg-cp "file://$TOP/$OUTPUT" "${OUTDIR}/$OUTPUT"
  lcg-cp "file://$TOP/extracted.root" "${OUTDIR}/$OUTPUTE"

fi
115 changes: 115 additions & 0 deletions L1Trigger/TrackFindingAM/test/batch/base/AMFIT_base.py
@@ -0,0 +1,115 @@
#########################
#
# Configuration file for L1 hough fit
# using a file with AMPR content
#
# This script works on any official production sample
# (assuming that this sample contains a container of TTStubs,
# a container of TTClusters, and a container of TrackingParticles)
#
# And of course, a container of patterns.... (TTTracks)
#
# NOTE: NEVTS, INPUTFILENAME, OUTPUTFILENAME and MYGLOBALTAG below are
# placeholders substituted via sed by the batch wrapper (PR_processor.sh,
# FIT case) before cmsRun is invoked.
#
# Instruction to run this script are provided on this page:
#
# http://sviret.web.cern.ch/sviret/Welcome.php?n=CMS.HLLHCTuto
#
# Look at STEP VI
#
# Author: S.Viret (viret@in2p3.fr)
# Date : 20/02/2014
#
# Script tested with release CMSSW_6_2_0_SLHC7
#
#########################

import FWCore.ParameterSet.Config as cms

# Process name AMFITBASE is also referenced below in the output commands
# ('keep *_*_*_AMFITBASE') to retain this step's products.
process = cms.Process('AMFITBASE')

# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('Configuration.Geometry.GeometryExtendedPhase2TkBE5DReco_cff')
process.load('Configuration.Geometry.GeometryExtendedPhase2TkBE5D_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_PostLS1_cff')
process.load('L1Trigger.TrackFindingAM.L1AMTrack_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.load('L1Trigger.TrackTrigger.TrackTrigger_cff')
process.load("Extractors.RecoExtractor.MIB_extractor_cff")

# NEVTS is replaced by the per-job event count ($NEVT) by the batch wrapper.
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(NEVTS)
)

# Input source
#
# You can use as input file the result of the script AMPR_test.py of part 5.2.2 of the tutorial
#
# Any other EDM file containing patterns and produced with CMSSW 620_SLHC7 should also work
#
# INPUTFILENAME is replaced by the xrootd address of the *_with_AMPR.root file.

process.source = cms.Source("PoolSource",
                            fileNames = cms.untracked.vstring('INPUTFILENAME'),
                            duplicateCheckMode = cms.untracked.string( 'noDuplicateCheck' )
)

# Additional output definition
# MYGLOBALTAG is replaced by the conditions global tag passed to the batch job.
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'MYGLOBALTAG', '')

# The name of the stub container over which the association is done, please note that the filtered cluster container is
# not associated due to the lack of simPixelDigis in official samples

#process.TTStubAssociatorFromPixelDigis.TTStubs = cms.VInputTag( cms.InputTag("MergeFITOutput", "StubInTrack"))
#process.TTStubAssociatorFromPixelDigis.TTClusterTruth = cms.VInputTag( cms.InputTag("TTClusterAssociatorFromPixelDigis","ClusterAccepted"))


# OUTPUTFILENAME is replaced by the fitted-output ROOT file name.
process.RAWSIMoutput = cms.OutputModule("PoolOutputModule",
    splitLevel = cms.untracked.int32(0),
    eventAutoFlushCompressedSize = cms.untracked.int32(5242880),
    outputCommands = process.RAWSIMEventContent.outputCommands,
    fileName = cms.untracked.string('OUTPUTFILENAME'),
    dataset = cms.untracked.PSet(
        filterName = cms.untracked.string(''),
        dataTier = cms.untracked.string('GEN-SIM')
    )
)

# For the moment need to explicitely keep the following containers
# (not yet in the customizing scripts)

# Extractor switches: match/MC truth, stub and L1 track extraction all enabled.
process.MIBextraction.doMatch = True
process.MIBextraction.doMC = True
process.MIBextraction.doSTUB = True
process.MIBextraction.doL1TRK = True

process.MIBextraction.L1pattern_tag = cms.InputTag( "MergePROutput", "AML1Patterns")
process.MIBextraction.L1track_tag = cms.InputTag( "MergeFITOutput", "AML1Tracks")
process.MIBextraction.CLUS_container = cms.string( "TTStubsFromPixelDigis")
process.MIBextraction.CLUS_name = cms.string( "ClusterAccepted" )

# Keep the PR output
process.RAWSIMoutput.outputCommands.append('keep *_*_*_AMPRBASE')

# Keep the FIT output
process.RAWSIMoutput.outputCommands.append('keep *_*_*_AMFITBASE')
process.RAWSIMoutput.outputCommands.append('drop *_TTTracksFromPattern_*_*')
process.RAWSIMoutput.outputCommands.append('keep *_*_MergedTrackTruth_*')

# Path and EndPath definitions: fit first, then extraction, then end-of-job
# bookkeeping and the EDM output write-out (order fixed by the schedule below).
process.L1AMFIT_step = cms.Path(process.TTTracksFromPatternswStubs)
process.p = cms.Path(process.MIBextraction)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.RAWSIMoutput_step = cms.EndPath(process.RAWSIMoutput)

process.schedule = cms.Schedule(process.L1AMFIT_step,process.p,process.endjob_step,process.RAWSIMoutput_step)

# Automatic addition of the customisation function

from SLHCUpgradeSimulations.Configuration.phase2TkCustomsBE5D import customise as customiseBE5D
from SLHCUpgradeSimulations.Configuration.phase2TkCustomsBE5D import l1EventContent as customise_ev_BE5D

process=customiseBE5D(process)
process=customise_ev_BE5D(process)