Merge pull request cms-sw#257 from gpetruc/CMGTools-from-CMSSW_7_2_3
Improvements to performance and to plotting tools, update pTrel definition
gpetruc committed Feb 13, 2015
2 parents dd6f321 + 57b374d commit 1ccf461
Showing 15 changed files with 355 additions and 54 deletions.
78 changes: 58 additions & 20 deletions CMGTools/TTHAnalysis/cfg/run_susyMultilepton_cfg.py
@@ -20,9 +20,6 @@
lepAna.packedCandidates = 'packedPFCandidates'
lepAna.miniIsolationPUCorr = 'rhoArea'
lepAna.miniIsolationVetoLeptons = None # use 'inclusive' to veto inclusive leptons and their footprint in all isolation cones
## will become miniIso perhaps?
#lepAna.loose_muon_isoCut = lambda muon : muon.relIso03 < 10.5
#lepAna.loose_electron_isoCut = lambda electron : electron.relIso03 < 10.5


# switch off slow photon MC matching
@@ -43,6 +40,31 @@
susyCoreSequence.insert(susyCoreSequence.index(ttHCoreEventAna),
                        ttHHeavyFlavourHadronAna)

## Lepton preselection to use
isolation = "relIso03"
if isolation == "ptRel":
    # delay the isolation cut for leptons of pt > 10, for which we do pTrel recovery
    lepAna.loose_muon_isoCut     = lambda muon : muon.relIso03 < 0.5 or muon.pt() > 10
    lepAna.loose_electron_isoCut = lambda elec : elec.relIso03 < 0.5 or elec.pt() > 10
    # in the cleaning, keep the jet if the lepton fails both the relIso and the ptRel cuts
    jetAna.jetLepArbitration = lambda jet,lepton : (
        lepton if (lepton.relIso03 < 0.4 or ptRelv1(lepton.p4(),jet.p4()) > 5) else jet
    )
    # insert a second skimmer after the jet cleaning
    ttHLepSkim2 = cfg.Analyzer(
        ttHLepSkimmer, name='ttHLepSkimmer2',
        minLeptons = 2,
        maxLeptons = 999,
        )
    susyCoreSequence.insert(susyCoreSequence.index(jetAna)+1, ttHLepSkim2)
elif isolation == "miniIso":
    lepAna.loose_muon_isoCut     = lambda muon : muon.miniRelIso < 0.4
    lepAna.loose_electron_isoCut = lambda elec : elec.miniRelIso < 0.4
else:
    # nothing to do, will use the normal relIso03
    pass
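
For orientation, here is a minimal standalone sketch of the three loose-lepton isolation schemes switched above. The toy lepton object and the relIso03 threshold in the default branch are made up for illustration (the configuration leaves the analyzer default untouched in that case); only the ptRel and miniIso cut values are copied from the block above.

    from collections import namedtuple

    # toy stand-in for a heppy lepton: pt() is a method on the real object,
    # relIso03 and miniRelIso are plain attributes, as in the cuts above
    Lepton = namedtuple("Lepton", ["pt_", "relIso03", "miniRelIso"])
    Lepton.pt = lambda self: self.pt_

    loose_iso_cuts = {
        "relIso03": lambda lep: lep.relIso03 < 0.5,                  # placeholder for the analyzer default
        "ptRel":    lambda lep: lep.relIso03 < 0.5 or lep.pt() > 10, # isolation deferred above 10 GeV (pTrel recovery later)
        "miniIso":  lambda lep: lep.miniRelIso < 0.4,
    }

    lep = Lepton(pt_=25.0, relIso03=0.8, miniRelIso=0.2)
    for scheme in ("relIso03", "ptRel", "miniIso"):
        print("%-8s -> passes loose selection: %s" % (scheme, loose_iso_cuts[scheme](lep)))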


from CMGTools.TTHAnalysis.samples.samples_13TeV_PHYS14 import triggers_mumu_iso, triggers_mumu_noniso, triggers_ee, triggers_3e, triggers_mue, triggers_1mu_iso, triggers_1e
triggerFlagsAna.triggerBits = {
    'DoubleMu' : triggers_mumu_iso,
@@ -92,50 +114,66 @@
# comp.splitFactor = 1
# comp.fineSplitFactor = 40

test = 1
if test == 1:
    comp = TTH; comp.name = "TTH"
    #comp = SMS_T1tttt_2J_mGl1500_mLSP100
from PhysicsTools.HeppyCore.framework.heppy import getHeppyOption
test = getHeppyOption('test')
if test == '1':
    comp = TTH
    if getHeppyOption('T1tttt'):
        comp = SMS_T1tttt_2J_mGl1500_mLSP100
    comp.files = comp.files[:1]
    #comp.files = [ '/afs/cern.ch/work/g/gpetrucc/micro/CMSSW_7_2_0/src/step3.root' ]
    comp.splitFactor = 1
    comp.fineSplitFactor = 4
    #ttHLepSkim.minLeptons = 0
    if not getHeppyOption('single'):
        comp.fineSplitFactor = 4
    selectedComponents = [ comp ]
elif test == 2:
elif test == '2':
    for comp in selectedComponents:
        comp.files = comp.files[:1]
        comp.splitFactor = 1
        comp.fineSplitFactor = 1
elif test == 3:
    comp = TTJets
elif test == 'EOS':
    comp = DYJetsToLL_M50#TTJets
    comp.files = comp.files[:1]
    if getHeppyOption('Wigner'):
        print "Will read from WIGNER"
        comp.files = [ 'root://eoscms//eos/cms/store/mc/Phys14DR/DYJetsToLL_M-50_13TeV-madgraph-pythia8/MINIAODSIM/PU20bx25_PHYS14_25_V1-v1/00000/0432E62A-7A6C-E411-87BB-002590DB92A8.root' ]
    else:
        print "Will read from CERN Meyrin"
        comp.files = [ 'root://eoscms//eos/cms/store/mc/Phys14DR/DYJetsToLL_M-50_13TeV-madgraph-pythia8/MINIAODSIM/PU20bx25_PHYS14_25_V1-v1/10000/F675C068-5E6C-E411-B915-0025907DC9AC.root' ]
    os.system("/afs/cern.ch/project/eos/installation/0.3.15/bin/eos.select fileinfo "+comp.files[0].replace("root://eoscms//","/"))
    comp.splitFactor = 1
    comp.fineSplitFactor = 1
    selectedComponents = [ comp ]
elif test == 4:
elif test == 'SingleMu':
    comp = SingleMu
    comp.files = comp.files[:1]
    comp.splitFactor = 1
    selectedComponents = [ comp ]
elif test == 10: # sync
elif test == '5':
    for comp in selectedComponents:
        comp.files = comp.files[:5]
        comp.splitFactor = 1
        comp.fineSplitFactor = 5
elif test == '2lss-sync': # sync
    #eventSelector.toSelect = [ 11809 ]
    #sequence = cfg.Sequence([eventSelector] + susyCoreSequence+[ ttHEventAna, treeProducer, ])
    jetAna.recalibrateJets = False
    jetAna.smearJets = False
    comp = SMS_T1tttt_2J_mGl1200_mLSP800
    comp.files = [ 'root://eoscms//eos/cms/store/mc/Phys14DR/SMS-T1tttt_2J_mGl-1200_mLSP-800_Tune4C_13TeV-madgraph-tauola/MINIAODSIM/PU20bx25_tsg_PHYS14_25_V1-v1/00000/0CD15D7F-4E6B-E411-AEB4-002590DB9216.root' ]
    comp.splitFactor = 1
    #comp.fineSplitFactor = 10
    comp.fineSplitFactor = 10
    selectedComponents = [ comp ]




# the following is declared in case this cfg is used in input to the heppy.py script
from PhysicsTools.HeppyCore.framework.eventsfwlite import Events
from CMGTools.TTHAnalysis.tools.EOSEventsWithDownload import EOSEventsWithDownload
event_class = EOSEventsWithDownload
if getHeppyOption("nofetch"):
    event_class = Events
config = cfg.Config( components = selectedComponents,
                     sequence = sequence,
                     services = [],
                     events_class = Events)


                     events_class = event_class)
8 changes: 7 additions & 1 deletion CMGTools/TTHAnalysis/python/analyzers/ntupleTypes.py
@@ -21,7 +21,7 @@
    NTupleVariable("mvaTTH", lambda lepton : getattr(lepton, 'mvaValueTTH', -1), help="Lepton MVA (TTH version)"),
    NTupleVariable("mvaSusy", lambda lepton : getattr(lepton, 'mvaValueSusy', -1), help="Lepton MVA (SUSY version)"),
    NTupleVariable("jetPtRatio", lambda lepton : lepton.pt()/lepton.jet.pt() if hasattr(lepton,'jet') else -1, help="pt(lepton)/pt(nearest jet)"),
    NTupleVariable("jetPtRel", lambda lepton : ptRel(lepton.p4(),lepton.jet.p4()) if hasattr(lepton,'jet') else -1, help="pt of the lepton transverse to the jet axis"),
    NTupleVariable("jetPtRel", lambda lepton : ptRelv1(lepton.p4(),lepton.jet.p4()) if hasattr(lepton,'jet') else -1, help="pt of the lepton transverse to the jet axis (subtracting the lepton)"),
    NTupleVariable("jetBTagCSV", lambda lepton : lepton.jet.btag('combinedInclusiveSecondaryVertexV2BJetTags') if hasattr(lepton,'jet') and hasattr(lepton.jet, 'btag') else -99, help="CSV btag of nearest jet"),
    NTupleVariable("jetBTagCMVA", lambda lepton : lepton.jet.btag('combinedMVABJetTags') if hasattr(lepton,'jet') and hasattr(lepton.jet, 'btag') else -99, help="CMVA btag of nearest jet"),
    NTupleVariable("jetDR", lambda lepton : deltaR(lepton.eta(),lepton.phi(),lepton.jet.eta(),lepton.jet.phi()) if hasattr(lepton,'jet') else -1, help="deltaR(lepton, nearest jet)"),
@@ -53,6 +53,7 @@
    NTupleVariable("jetmaxSip3D", lambda lepton : maxSip3Djettracks(lepton), help="max Sip3D among jet's tracks"),
    NTupleVariable("jetmaxSignedSip2D", lambda lepton : maxSignedSip2Djettracks(lepton) , help="max signed Sip2D among jet's tracks"),
    NTupleVariable("jetmaxSip2D", lambda lepton : maxSip2Djettracks(lepton), help="max Sip2D among jet's tracks"),
    NTupleVariable("jetPtRelv0", lambda lepton : ptRel(lepton.p4(),lepton.jet.p4()) if hasattr(lepton,'jet') else -1, help="pt of the lepton transverse to the jet axis (not subtracting the lepton)"),
])


@@ -190,3 +191,8 @@ def ptRel(p4,axis):
    a = ROOT.TVector3(axis.Vect().X(),axis.Vect().Y(),axis.Vect().Z())
    o = ROOT.TLorentzVector(p4.Px(),p4.Py(),p4.Pz(),p4.E())
    return o.Perp(a)
def ptRelv1(p4,axis):
    axis = axis - p4
    a = ROOT.TVector3(axis.Vect().X(),axis.Vect().Y(),axis.Vect().Z())
    o = ROOT.TLorentzVector(p4.Px(),p4.Py(),p4.Pz(),p4.E())
    return o.Perp(a)
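
To see the effect of the pTrel update in this commit: ptRelv1 subtracts the lepton four-momentum from the jet axis before taking the transverse component, so a lepton that carries most of its jet's momentum no longer measures its pTrel against itself. A small self-contained PyROOT check, with the two definitions copied from above and made-up four-vectors:

    import ROOT

    def ptRel(p4, axis):
        a = ROOT.TVector3(axis.Vect().X(), axis.Vect().Y(), axis.Vect().Z())
        o = ROOT.TLorentzVector(p4.Px(), p4.Py(), p4.Pz(), p4.E())
        return o.Perp(a)

    def ptRelv1(p4, axis):
        axis = axis - p4   # remove the lepton itself from the jet axis
        a = ROOT.TVector3(axis.Vect().X(), axis.Vect().Y(), axis.Vect().Z())
        o = ROOT.TLorentzVector(p4.Px(), p4.Py(), p4.Pz(), p4.E())
        return o.Perp(a)

    lep = ROOT.TLorentzVector(); lep.SetPtEtaPhiM(30., 0.0, 0.00, 0.106)
    jet = ROOT.TLorentzVector(); jet.SetPtEtaPhiM(45., 0.1, 0.05, 10.0)   # jet four-vector still contains the lepton
    print("ptRel (v0): %.2f GeV" % ptRel(lep, jet))
    print("ptRelv1   : %.2f GeV" % ptRelv1(lep, jet))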
4 changes: 2 additions & 2 deletions CMGTools/TTHAnalysis/python/analyzers/susyCore_modules_cff.py
@@ -90,11 +90,11 @@
genAna = cfg.Analyzer(
    GeneratorAnalyzer, name="GeneratorAnalyzer",
    # BSM particles that can appear with status <= 2 and should be kept
    stableBSMParticleIds = { 1000022 },
    stableBSMParticleIds = [ 1000022 ],
    # Particles of which we want to save the pre-FSR momentum (a la status 3).
    # Note that for quarks and gluons the post-FSR doesn't make sense,
    # so those should always be in the list
    savePreFSRParticleIds = { 1,2,3,4,5, 11,12,13,14,15,16, 21 },
    savePreFSRParticleIds = [ 1,2,3,4,5, 11,12,13,14,15,16, 21 ],
    # Make also the list of all genParticles, for other analyzers to handle
    makeAllGenParticles = True,
    # Make also the split lists
CMGTools/TTHAnalysis/python/analyzers/ttHHeavyFlavourHadronAnalyzer.py
@@ -11,7 +11,6 @@ def declareHandles(self):
    def beginLoop(self, setup):
        super(ttHHeavyFlavourHadronAnalyzer,self).beginLoop(setup)


    def process(self, event):
        self.readCollections( event.input )
        if not self.cfg_comp.isMC: return True
@@ -57,6 +56,14 @@ def descendent(child, bhadron):
            # OK, here we are
            g.flav = myflav
            heavyHadrons.append(g)

        # if none is found, give up here without going through the rest, so we avoid e.g. mc matching for jets
        if len(heavyHadrons) == 0:
            event.genHeavyHadrons = heavyHadrons
            event.genBHadrons = [ h for h in heavyHadrons if h.flav == 5 ]
            event.genDHadrons = [ h for h in heavyHadrons if h.flav == 4 ]
            return True

        # match with IVF
        had_ivf_pairs = []
        #print "\nNew event"
@@ -83,15 +90,23 @@ def descendent(child, bhadron):
            # print " had %d is already matched " % (ihad,)
        # match with jets:
        had_jet_pairs = []
        # first loop on jets, get and match daughters
        jetsWithMatchedDaughters = []
        for j in event.jetsIdOnly:
            dausWithMatch = []
            for idau in xrange(j.numberOfDaughters()):
                dau = j.daughter(idau)
                if dau.charge() == 0 or abs(dau.eta()) > 2.5: continue
                mct, dr, dpt = matchToGenHadron(dau, event, minDR=0.05, minDpt=0.1)
                if mct == None: continue
                dausWithMatch.append((dau,mct))
            jetsWithMatchedDaughters.append((j,dausWithMatch))
        for ihad, had in enumerate(heavyHadrons):
            had.jet = None
            for ij,j in enumerate(event.jetsIdOnly):
            for ij,(j,dausWithMatch) in enumerate(jetsWithMatchedDaughters):
                shared_n, shared_pt = 0, 0
                for dau in [j.daughter(i) for i in xrange(j.numberOfDaughters())]:
                    if dau.charge() == 0 or abs(dau.eta()) > 2.5: continue
                    mct, dr, dpt = matchToGenHadron(dau, event, minDR=0.05, minDpt=0.1)
                    if mct == None: continue
                    if descendent(mct,had):
                for dau,mct in dausWithMatch:
                    if descendent(mct,had):
                        shared_n += 1; shared_pt += mct.pt()
                if shared_n:
                    had_jet_pairs.append( (ihad, ij, shared_n, shared_pt) )
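
The restructuring above is a performance fix: matchToGenHadron is now called once per jet daughter instead of once per (hadron, daughter) pair, and the cached matches are reused in the hadron loop. A generic sketch of that hoisting pattern, with placeholder names (expensive_match, descendent, jet.daughters, pt()) standing in for the analyzer's actual calls:

    # before: the expensive per-daughter matching sat inside the hadron loop,
    #         so each daughter was matched len(hadrons) times
    # after:  match each daughter once, then reuse the result for every hadron
    def pair_hadrons_with_jets(hadrons, jets, expensive_match, descendent):
        jets_with_matches = []
        for jet in jets:
            matched = [(dau, expensive_match(dau)) for dau in jet.daughters]
            jets_with_matches.append((jet, [(d, m) for (d, m) in matched if m is not None]))
        pairs = []
        for ihad, had in enumerate(hadrons):
            for ij, (jet, matched) in enumerate(jets_with_matches):
                shared = [m for (d, m) in matched if descendent(m, had)]
                if shared:
                    pairs.append((ihad, ij, len(shared), sum(m.pt() for m in shared)))
        return pairs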
5 changes: 5 additions & 0 deletions CMGTools/TTHAnalysis/python/plotter/mcAnalysis.py
@@ -88,6 +88,11 @@ def __init__(self,samples,options):
                # Heppy calls the tree just 'tree.root'
                rootfile = "%s/%s/%s/tree.root" % (options.path, field[1].strip(), treename)
                treename = "tree"
            elif (not os.path.exists(rootfile)) and os.path.exists("%s/%s/%s/tree.root.url" % (options.path, field[1].strip(), treename)):
                # Heppy calls the tree just 'tree.root'
                rootfile = "%s/%s/%s/tree.root" % (options.path, field[1].strip(), treename)
                rootfile = open(rootfile+".url","r").readline().strip()
                treename = "tree"
            pckfile = options.path+"/%s/skimAnalyzerCount/SkimReport.pck" % field[1].strip()
            tty = TreeToYield(rootfile, options, settings=extra, name=pname, cname=field[1].strip(), treename=treename)
            if signal:
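
The new elif above lets a Heppy sample directory ship a small tree.root.url text file instead of tree.root itself; its first line is read and used as the actual (typically xrootd) location of the tree. A minimal sketch of writing and resolving such a pointer file; the directory layout and URL are made up:

    import os

    sample_dir = "/tmp/TTJets/treeProducerSusyMultilepton"   # made-up Heppy output directory
    if not os.path.isdir(sample_dir):
        os.makedirs(sample_dir)

    # instead of shipping tree.root, store a one-line pointer to the remote copy
    with open(os.path.join(sample_dir, "tree.root.url"), "w") as f:
        f.write("root://eoscms//eos/cms/store/user/someone/TTJets/tree.root\n")

    # resolution logic as in mcAnalysis.py: read the first line and use it as the file name
    rootfile = os.path.join(sample_dir, "tree.root")
    if (not os.path.exists(rootfile)) and os.path.exists(rootfile + ".url"):
        rootfile = open(rootfile + ".url", "r").readline().strip()
    print(rootfile)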
132 changes: 132 additions & 0 deletions CMGTools/TTHAnalysis/python/plotter/mcEfficiencies.py
@@ -0,0 +1,132 @@
#!/usr/bin/env python
#from mcPlots import *
from CMGTools.TTHAnalysis.plotter.mcPlots import *


def addROCMakerOptions(parser):
    addMCAnalysisOptions(parser)
    parser.add_option("--select-plot", "--sP", dest="plotselect", action="append", default=[], help="Select only these plots out of the full file")
    parser.add_option("--exclude-plot", "--xP", dest="plotexclude", action="append", default=[], help="Exclude these plots from the full file")

def doLegend(rocs,options,textSize=0.035):
    if options.legend == "TR":
        (x1,y1,x2,y2) = (.6, .85 - textSize*max(len(rocs)-3,0), .93, .98)
    elif options.legend == "TL":
        (x1,y1,x2,y2) = (.2, .85 - textSize*max(len(rocs)-3,0), .53, .98)
    else:
        (x1,y1,x2,y2) = (.6, .30 + textSize*max(len(rocs)-3,0), .93, .18)
    leg = ROOT.TLegend(x1,y1,x2,y2)
    leg.SetFillColor(0)
    leg.SetShadowColor(0)
    leg.SetTextFont(42)
    leg.SetTextSize(textSize)
    for key,val in rocs:
        leg.AddEntry(val, key, "LP")
    leg.Draw()
    ## assign it to a global variable so it's not deleted
    global legend_;
    legend_ = leg
    return leg

def stackEffs(outname,x,effs,options):
    alleffs = ROOT.THStack("all","all")
    for title,eff in effs:
        alleffs.Add(eff)
    c1 = ROOT.TCanvas("eff_canvas","eff_canvas")
    c1.SetGridy(options.showGrid)
    c1.SetGridx(options.showGrid)
    c1.SetLogx(x.getOption('Logx',False))
    alleffs.Draw("APL");
    h0 = effs[0][1].Clone("frame"); h0.Reset();
    h0.GetYaxis().SetDecimals()
    h0.GetYaxis().SetTitle(options.ytitle)
    h0.Draw("AXIS");
    alleffs.Draw("NOSTACK P SAME")
    if options.xrange:
        h0.GetXaxis().SetRangeUser(options.xrange[0], options.xrange[1])
    if options.yrange:
        h0.GetYaxis().SetRangeUser(options.yrange[0], options.yrange[1])
    leg = doLegend(effs,options)
    if options.fontsize: leg.SetTextSize(options.fontsize)
    c1.Print(outname.replace(".root","")+".png")
    c1.Print(outname.replace(".root","")+".eps")
    c1.Print(outname.replace(".root","")+".pdf")

def makeEff(mca,cut,idplot,xvarplot):
    import copy
    is2D = (":" in xvarplot.expr.replace("::","--"))
    options = copy.copy(idplot.opts)
    options.update(xvarplot.opts)
    if is2D: options['Profile2D']=True
    else:    options['Profile1D']=True
    pspec = PlotSpec("%s_vs_%s" % (idplot.name, xvarplot.name),
                     "%s:%s" % (idplot.expr,xvarplot.expr),
                     xvarplot.bins,
                     options)
    print pspec.name
    return mca.getPlots(pspec,cut,makeSummary=True)

if __name__ == "__main__":
    from optparse import OptionParser
    parser = OptionParser(usage="%prog [options] mc.txt cuts.txt ids.txt xvars.txt")
    addROCMakerOptions(parser)
    parser.add_option("-o", "--out", dest="out", default=None, help="Output file name; by default the ids file name with '.txt' replaced by '.root'");
    parser.add_option("--rebin", dest="globalRebin", type="int", default="0", help="Rebin all plots by this factor")
    parser.add_option("--xrange", dest="xrange", default=None, nargs=2, type='float', help="X axis range");
    parser.add_option("--yrange", dest="yrange", default=None, nargs=2, type='float', help="Y axis range");
    parser.add_option("--ytitle", dest="ytitle", default="Efficiency", type='string', help="Y axis title");
    parser.add_option("--fontsize", dest="fontsize", default=0, type='float', help="Legend font size");
    parser.add_option("--grid", dest="showGrid", action="store_true", default=False, help="Show grid lines")
    parser.add_option("--groupBy", dest="groupBy", default="process", type="string", help="Group by: cut, process")
    parser.add_option("--legend", dest="legend", default="TR", type="string", help="Legend position (BR, TR)")
    (options, args) = parser.parse_args()
    options.globalRebin = 1
    options.allowNegative = True # with the fine bins used in ROCs, one otherwise gets nonsensical results
    mca  = MCAnalysis(args[0],options)
    procs = mca.listSignals()+mca.listBackgrounds()
    cut = CutsFile(args[1],options).allCuts()
    ids   = PlotFile(args[2],options).plots()
    xvars = PlotFile(args[3],options).plots()
    outname  = options.out if options.out else (args[2].replace(".txt","")+".root")
    outfile  = ROOT.TFile(outname,"RECREATE")
    ROOT.gROOT.ProcessLine(".x tdrstyle.cc")
    ROOT.gStyle.SetErrorX(0.5)
    ROOT.gStyle.SetOptStat(0)
    effplots = [ (y,x,makeEff(mca,cut,y,x)) for y in ids for x in xvars ]
    if len(procs)>1 and "cut" in options.groupBy:
        for x in xvars:
            for y,ex,pmap in effplots:
                if ex != x: continue
                effs = []
                myname = outname.replace(".root","_%s_%s.root" % (y.name,x.name))
                for proc in procs:
                    eff = pmap[proc]
                    if not eff: continue
                    eff.SetLineColor(mca.getProcessOption(proc,"FillColor",SAFE_COLOR_LIST[len(effs)]))
                    eff.SetFillColor(mca.getProcessOption(proc,"FillColor",SAFE_COLOR_LIST[len(effs)]))
                    eff.SetMarkerStyle(mca.getProcessOption(proc,"MarkerStyle",33))
                    eff.SetMarkerSize(mca.getProcessOption(proc,"MarkerSize",1.4)*0.8)
                    eff.SetLineWidth(2)
                    effs.append((mca.getProcessOption(proc,"Label",proc),eff))
                if len(effs) == 0: continue
                stackEffs(myname,x,effs,options)
    if "process" in options.groupBy:
        for proc in procs:
            for x in xvars:
                effs = []
                myname = outname.replace(".root","_%s_%s.root" % (proc,x.name))
                for y,ex,pmap in effplots:
                    if ex != x: continue
                    eff = pmap[proc]
                    if not eff: continue
                    eff.SetLineColor(y.getOption("MarkerColor",SAFE_COLOR_LIST[len(effs)]))
                    eff.SetMarkerColor(y.getOption("MarkerColor",SAFE_COLOR_LIST[len(effs)]))
                    eff.SetMarkerStyle(y.getOption("MarkerStyle",33))
                    eff.SetMarkerSize(y.getOption("MarkerSize",1.4)*0.8)
                    eff.SetLineWidth(2)
                    effs.append((y.getOption("Title",y.name),eff))
                if len(effs) == 0: continue
                stackEffs(myname,x,effs,options)
    outfile.Close()

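mcEfficiencies.py builds its efficiency curves by profiling a pass/fail (0/1) expression against a kinematic variable, which is what the Profile1D/Profile2D options set in makeEff presumably request from the plotter: the per-bin mean of a 0/1 quantity is exactly the per-bin efficiency. A standalone PyROOT illustration of that idea, independent of the plotter machinery; the turn-on shape is invented:

    import random
    import ROOT

    # toy sample: leptons with a pt-dependent probability of passing some ID
    prof = ROOT.TProfile("eff_vs_pt", "toy efficiency;p_{T} [GeV];efficiency", 10, 0., 100.)
    random.seed(42)
    for _ in range(20000):
        pt = random.uniform(0., 100.)
        eff_true = min(0.95, 0.3 + 0.01 * pt)    # invented turn-on, purely illustrative
        passed = 1.0 if random.random() < eff_true else 0.0
        prof.Fill(pt, passed)                    # profile of a 0/1 variable = per-bin efficiency

    c1 = ROOT.TCanvas("c1", "c1")
    prof.Draw()
    c1.Print("toy_eff_vs_pt.png")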

5 changes: 5 additions & 0 deletions CMGTools/TTHAnalysis/python/plotter/rocCurves.py
@@ -104,13 +104,16 @@ def stackRocks(outname,outfile,rocs,xtit,ytit,options):
        allrocs.Add(roc,roc.style)
        outfile.WriteTObject(roc)
    c1 = ROOT.TCanvas("roc_canvas","roc_canvas")
    c1.SetGridy(options.showGrid)
    c1.SetGridx(options.showGrid)
    allrocs.Draw("APL");
    allrocs.GetXaxis().SetTitle(xtit)
    allrocs.GetYaxis().SetTitle(ytit)
    if options.xrange:
        allrocs.GetXaxis().SetRangeUser(options.xrange[0], options.xrange[1])
    if options.yrange:
        allrocs.GetYaxis().SetRangeUser(options.yrange[0], options.yrange[1])
    c1.SetLogx(options.logx)
    allrocs.Draw()
    leg = doLegend(rocs)
    if options.fontsize: leg.SetTextSize(options.fontsize)
@@ -129,6 +132,8 @@ def stackRocks(outname,outfile,rocs,xtit,ytit,options):
    parser.add_option("--fontsize", dest="fontsize", default=0, type='float', help="Legend font size");
    parser.add_option("--splitSig", dest="splitSig", default=False, action="store_true", help="Make one ROC per signal")
    parser.add_option("--splitBkg", dest="splitBkg", default=False, action="store_true", help="Make one ROC per background")
    parser.add_option("--grid", dest="showGrid", action="store_true", default=False, help="Show grid lines")
    parser.add_option("--logx", dest="logx", action="store_true", default=False, help="Log x-axis")
    parser.add_option("--groupBy", dest="groupBy", default="process", type="string", help="Group by: variable, process")
    (options, args) = parser.parse_args()
    options.globalRebin = 1
