Skip to content

Commit

Permalink
Merge pull request #60 from mbluj/photonPtGt1GeV
Browse files Browse the repository at this point in the history
Increase the photon (gamma) Et threshold used in tau reconstruction & identification
  • Loading branch information
roger-wolf committed Oct 11, 2017
2 parents 073022d + 83f1b99 commit dfa9787
Show file tree
Hide file tree
Showing 6 changed files with 35 additions and 35 deletions.
10 changes: 5 additions & 5 deletions RecoTauTag/Configuration/python/HPSPFTaus_cff.py
Expand Up @@ -80,12 +80,12 @@
## ByMediumIsolation
hpsPFTauDiscriminationByMediumIsolation = hpsPFTauDiscriminationByLooseIsolation.clone()
hpsPFTauDiscriminationByMediumIsolation.qualityCuts.isolationQualityCuts.minTrackPt = 0.8
hpsPFTauDiscriminationByMediumIsolation.qualityCuts.isolationQualityCuts.minGammaEt = 0.8
hpsPFTauDiscriminationByMediumIsolation.qualityCuts.isolationQualityCuts.minGammaEt = 1.0
hpsPFTauDiscriminationByMediumIsolation.Prediscriminants.preIso.Producer = cms.InputTag("hpsPFTauDiscriminationByMediumChargedIsolation")
## ByTightIsolation
hpsPFTauDiscriminationByTightIsolation = hpsPFTauDiscriminationByLooseIsolation.clone()
hpsPFTauDiscriminationByTightIsolation.qualityCuts.isolationQualityCuts.minTrackPt = 0.5
hpsPFTauDiscriminationByTightIsolation.qualityCuts.isolationQualityCuts.minGammaEt = 0.5
hpsPFTauDiscriminationByTightIsolation.qualityCuts.isolationQualityCuts.minGammaEt = 1.0
hpsPFTauDiscriminationByTightIsolation.Prediscriminants.preIso.Producer = cms.InputTag("hpsPFTauDiscriminationByTightChargedIsolation")
## ByLooseIsolationDBSumPtCorr
hpsPFTauDiscriminationByLooseIsolationDBSumPtCorr = hpsPFTauDiscriminationByLooseIsolation.clone(
Expand Down Expand Up @@ -128,7 +128,7 @@
Prediscriminants = requireDecayMode.clone()
)
hpsPFTauDiscriminationByLooseCombinedIsolationDBSumPtCorr.qualityCuts.isolationQualityCuts.minTrackPt = 0.5
hpsPFTauDiscriminationByLooseCombinedIsolationDBSumPtCorr.qualityCuts.isolationQualityCuts.minGammaEt = 0.5
hpsPFTauDiscriminationByLooseCombinedIsolationDBSumPtCorr.qualityCuts.isolationQualityCuts.minGammaEt = 1.0
## ByMediumCombinedIsolationDBSumPtCorr
hpsPFTauDiscriminationByMediumCombinedIsolationDBSumPtCorr = hpsPFTauDiscriminationByMediumIsolationDBSumPtCorr.clone(
ApplyDiscriminationByTrackerIsolation = True,
Expand All @@ -140,7 +140,7 @@
Prediscriminants = requireDecayMode.clone()
)
hpsPFTauDiscriminationByMediumCombinedIsolationDBSumPtCorr.qualityCuts.isolationQualityCuts.minTrackPt = 0.5
hpsPFTauDiscriminationByMediumCombinedIsolationDBSumPtCorr.qualityCuts.isolationQualityCuts.minGammaEt = 0.5
hpsPFTauDiscriminationByMediumCombinedIsolationDBSumPtCorr.qualityCuts.isolationQualityCuts.minGammaEt = 1.0
## ByTightCombinedIsolationDBSumPtCorr
hpsPFTauDiscriminationByTightCombinedIsolationDBSumPtCorr = hpsPFTauDiscriminationByTightIsolationDBSumPtCorr.clone(
ApplyDiscriminationByTrackerIsolation = True,
Expand All @@ -152,7 +152,7 @@
Prediscriminants = requireDecayMode.clone()
)
hpsPFTauDiscriminationByTightCombinedIsolationDBSumPtCorr.qualityCuts.isolationQualityCuts.minTrackPt = 0.5
hpsPFTauDiscriminationByTightCombinedIsolationDBSumPtCorr.qualityCuts.isolationQualityCuts.minGammaEt = 0.5
hpsPFTauDiscriminationByTightCombinedIsolationDBSumPtCorr.qualityCuts.isolationQualityCuts.minGammaEt = 1.0
## ByLooseChargedIsolation
hpsPFTauDiscriminationByLooseChargedIsolation = hpsPFTauDiscriminationByLooseCombinedIsolationDBSumPtCorr.clone(
ApplyDiscriminationByECALIsolation = False
Expand Down
2 changes: 1 addition & 1 deletion RecoTauTag/RecoTau/python/HLTPFRecoTauQualityCuts_cfi.py
Expand Up @@ -15,7 +15,7 @@
maxTransverseImpactParameter = cms.double(0.03), # Should in general be disabled at HLT (PV is sometimes missing)
minTrackVertexWeight = cms.double(-1), # Should in general be disabled at HLT (PV is sometimes missing)
minTrackHits = cms.uint32(3), # total track hits
minGammaEt = cms.double(0.5), # filter PFgammas below given Pt
minGammaEt = cms.double(1.0), # filter PFgammas below given Pt
useTracksInsteadOfPFHadrons = cms.bool(False), # if true, use generalTracks, instead of PFChargedHadrons
),
isolationQualityCuts = cms.PSet(
Expand Down
Expand Up @@ -31,8 +31,8 @@
dRmergePhotonWrtOther = cms.double(0.005),
minBlockElementMatchesPhoton = cms.int32(2),
maxUnmatchedBlockElementsPhoton = cms.int32(1),
minMergeNeutralHadronEt = cms.double(0.),
minMergeGammaEt = cms.double(0.),
minMergeNeutralHadronEt = cms.double(1.0),
minMergeGammaEt = cms.double(1.0),
minMergeChargedHadronPt = cms.double(100.)
)

Expand All @@ -47,8 +47,8 @@
qualityCuts = PFTauQualityCuts,
dRmergeNeutralHadron = cms.double(0.10),
dRmergePhoton = cms.double(0.05),
minMergeNeutralHadronEt = cms.double(0.),
minMergeGammaEt = cms.double(0.),
minMergeNeutralHadronEt = cms.double(1.0),
minMergeGammaEt = cms.double(1.0),
minMergeChargedHadronPt = cms.double(100.)
)

Expand Down
4 changes: 2 additions & 2 deletions RecoTauTag/RecoTau/python/PFRecoTauQualityCuts_cfi.py
Expand Up @@ -14,7 +14,7 @@
minTrackVertexWeight = cms.double(-1.), # Tracks weight in vertex
minTrackPixelHits = cms.uint32(0), # pixel-only hits
minTrackHits = cms.uint32(3), # total track hits
minGammaEt = cms.double(0.5), # filter PFgammas below given Pt
minGammaEt = cms.double(1.0), # filter PFgammas below given Pt
#useTracksInsteadOfPFHadrons = cms.bool(False), # if true, use generalTracks, instead of PFChargedHadrons
minNeutralHadronEt = cms.double(30.)
),
Expand All @@ -36,7 +36,7 @@
minTrackVertexWeight = cms.double(-1.), # Tracks weight in vertex
minTrackPixelHits = cms.uint32(0), # pixel-only hits
minTrackHits = cms.uint32(3), # total track hits
minGammaEt = cms.double(0.5) # filter PFgammas below given Pt
minGammaEt = cms.double(1.0) # filter PFgammas below given Pt
#useTracksInsteadOfPFHadrons = cms.bool(False), # if true, use generalTracks, instead of PFChargedHadrons
),
# The central definition of primary vertex source.
Expand Down
2 changes: 1 addition & 1 deletion RecoTauTag/RecoTau/python/PFRecoTauTagInfoProducer_cfi.py
Expand Up @@ -9,7 +9,7 @@
ChargedHadrCand_tkminPt = cms.double(0.5), # charged PF objects
tkminPt = cms.double(0.5), # track (non-PF) objects
NeutrHadrCand_HcalclusMinEt = cms.double(1.0), # PF neutral hadrons (HCAL)
GammaCand_EcalclusMinEt = cms.double(0.5), # PF gamma candidates (ECAL)
GammaCand_EcalclusMinEt = cms.double(1.0), # PF gamma candidates (ECAL)

# The size of the delta R cone used to collect objects from the jet
ChargedHadrCand_AssociationCone = cms.double(0.8),
Expand Down
44 changes: 22 additions & 22 deletions RecoTauTag/RecoTau/python/RecoTauPiZeroBuilderPlugins_cfi.py
Expand Up @@ -60,8 +60,8 @@
modStrips = strips.clone(
plugin = cms.string('RecoTauPiZeroStripPlugin2'),
applyElecTrackQcuts = cms.bool(False),
minGammaEtStripSeed = cms.double(0.5),
minGammaEtStripAdd = cms.double(0.),
minGammaEtStripSeed = cms.double(1.0),
minGammaEtStripAdd = cms.double(1.0),
minStripEt = cms.double(1.0),
updateStripAfterEachDaughter = cms.bool(False),
maxStripBuildIterations = cms.int32(-1)
Expand All @@ -70,24 +70,24 @@
# Produce a "strips" of photons
# with no track quality cuts applied to PFElectrons
# and eta x phi size of strip increasing for low pT photons
modStrips2 = strips.clone(
plugin = cms.string('RecoTauPiZeroStripPlugin3'),
applyElecTrackQcuts = cms.bool(False),
minGammaEtStripSeed = cms.double(0.5),
minGammaEtStripAdd = cms.double(0.),
minStripEt = cms.double(0.5),
# CV: parametrization of strip size in eta and phi determined by Yuta Takahashi,
# chosen to contain 95% of photons from tau decays
stripEtaAssociationDistance = cms.PSet(
function = cms.string("TMath::Min(0.15, TMath::Max(0.05, [0]*TMath::Power(pT, -[1])))"),
par0 = cms.double(1.97077e-01),
par1 = cms.double(6.58701e-01)
),
stripPhiAssociationDistance = cms.PSet(
function = cms.string("TMath::Min(0.3, TMath::Max(0.05, [0]*TMath::Power(pT, -[1])))"),
par0 = cms.double(3.52476e-01),
par1 = cms.double(7.07716e-01)
),
updateStripAfterEachDaughter = cms.bool(False),
maxStripBuildIterations = cms.int32(-1)
modStrips2 = strips.clone(
plugin = cms.string('RecoTauPiZeroStripPlugin3'),
applyElecTrackQcuts = cms.bool(False),
minGammaEtStripSeed = cms.double(1.0),
minGammaEtStripAdd = cms.double(1.0),
minStripEt = cms.double(1.0),
# CV: parametrization of strip size in eta and phi determined by Yuta Takahashi,
# chosen to contain 95% of photons from tau decays
stripEtaAssociationDistance = cms.PSet(
function = cms.string("TMath::Min(0.15, TMath::Max(0.05, [0]*TMath::Power(pT, -[1])))"),
par0 = cms.double(1.97077e-01),
par1 = cms.double(6.58701e-01)
),
stripPhiAssociationDistance = cms.PSet(
function = cms.string("TMath::Min(0.3, TMath::Max(0.05, [0]*TMath::Power(pT, -[1])))"),
par0 = cms.double(3.52476e-01),
par1 = cms.double(7.07716e-01)
),
updateStripAfterEachDaughter = cms.bool(False),
maxStripBuildIterations = cms.int32(-1)
)

0 comments on commit dfa9787

Please sign in to comment.