Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
47 changes: 47 additions & 0 deletions bin/AN-13-015.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
#!/bin/bash
# AN-13-015: run the full cross-section measurement chain.
# Each stage is run once without -v (MET default) and once per additional
# variable, all in parallel; `wait` blocks until a stage finishes before
# the next one starts, since later stages consume earlier stages' output.

# -p: don't fail if the directories already exist (script is re-runnable)
mkdir -p logs plots/fitchecks

# Variables measured in addition to the default (MET).
VARIABLES="HT ST MT WPT"

# Stage 01: fit results. Correlation output goes to plots/fitchecks.
nohup python src/cross_section_measurement/01_get_fit_results.py >& plots/fitchecks/correlation_MET.txt &
for v in $VARIABLES; do
    nohup python src/cross_section_measurement/01_get_fit_results.py -v $v >& plots/fitchecks/correlation_$v.txt &
done
wait

# Stage 02: unfolding and measurement.
nohup python src/cross_section_measurement/02_unfold_and_measure.py >& logs/02_unfold_and_measure_MET.log &
for v in $VARIABLES; do
    nohup python src/cross_section_measurement/02_unfold_and_measure.py -v $v >& logs/02_unfold_and_measure_$v.log &
done
wait

# Stage 03: systematic uncertainties.
nohup python src/cross_section_measurement/03_calculate_systematics.py >& logs/03_calculate_systematics_MET.log &
for v in $VARIABLES; do
    nohup python src/cross_section_measurement/03_calculate_systematics.py -v $v >& logs/03_calculate_systematics_$v.log &
done
wait

# Stage 04: plots (log filename typo "matpotlib" fixed -> "matplotlib").
nohup python src/cross_section_measurement/04_make_plots_matplotlib.py >& logs/04_make_plots_matplotlib_MET.log &
for v in $VARIABLES; do
    nohup python src/cross_section_measurement/04_make_plots_matplotlib.py -v $v >& logs/04_make_plots_matplotlib_$v.log &
done
wait

# Stage 05: result tables.
nohup python src/cross_section_measurement/05_make_tables.py >& logs/05_make_tables_MET.log &
for v in $VARIABLES; do
    nohup python src/cross_section_measurement/05_make_tables.py -v $v >& logs/05_make_tables_$v.log &
done
wait

# Cross-checks: QCD check runs once (no per-variable mode in the original).
nohup python src/cross_section_measurement/99_QCD_cross_checks.py >& logs/99_QCD_cross_checks.log &

nohup python src/cross_section_measurement/98_fit_cross_checks.py >& logs/98_fit_cross_checks_MET.log &
for v in $VARIABLES; do
    nohup python src/cross_section_measurement/98_fit_cross_checks.py -v $v >& logs/98_fit_cross_checks_$v.log &
done
wait
28 changes: 8 additions & 20 deletions config/cross_section_measurement_7TeV.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@

@author: kreczko
'''

centre_of_mass = 7 # TeV

'''
Expand All @@ -15,6 +14,7 @@
new_luminosity = 5050 # pb-1
luminosity_scale = float(new_luminosity)/float(luminosity)
ttbar_xsection = 164 # pb

middle = '_' + str(luminosity) + 'pb_PFElectron_PFMuon_PF2PATJets_PFMET'

data_file_electron = path_to_files + 'central/ElectronHad' + middle + '.root'
Expand All @@ -31,27 +31,30 @@

generator_systematics = [ 'matchingup', 'matchingdown', 'scaleup', 'scaledown' ]
ttbar_generator_systematics = [ 'matchingup', 'matchingdown', 'scaleup', 'scaledown', 'mcatnlo']
central_general_template = path_to_files + 'central/%s' + middle + '.root'
generator_systematic_ttbar_templates = { systematic: path_to_files + 'central/TTJets-%s_%dpb_PFElectron_PFMuon_PF2PATJets_PFMET.root' % (systematic, luminosity) for systematic in ttbar_generator_systematics}
generator_systematic_vjets_templates = { systematic:path_to_files + 'central/VJets-%s_%dpb_PFElectron_PFMuon_PF2PATJets_PFMET.root' % (systematic, luminosity) for systematic in generator_systematics}

pdf_uncertainty_template = path_to_files + 'PDFWeights/TTJet' + middle + '_PDFWeights_%d.root'

categories_and_prefixes = {
'central':'',
'Electron_down':'_minusElectron',
'Electron_up':'_plusElectron',
'Muon_down':'_minusMuon',
'Muon_up':'_plusMuon',
'BJet_down':'_minusBJet',
'BJet_up':'_plusBjet',
'JES_down':'_minusJES',
'JES_up':'_plusJES',
#placeholders
# 'JER_down':'_minusJER',
# 'JER_up':'_plusJER',
'JER_down':'_minusJER',
'JER_up':'_plusJER',
'LightJet_down':'_minusLightJet',
'LightJet_up':'_plusLightJet',
'PU_down':'_PU_64600mb',
'PU_up':'_PU_71400mb'
}


general_category_templates = {category: path_to_files + category + '/%s' + middle + prefix + '.root' for category, prefix in categories_and_prefixes.iteritems()}
ttbar_category_templates = {category: path_to_files + category + '/TTJet' + middle + prefix + '.root' for category, prefix in categories_and_prefixes.iteritems()}
SingleTop_category_templates = {category: path_to_files + category + '/SingleTop' + middle + prefix + '.root' for (category, prefix) in categories_and_prefixes.iteritems()}
Expand All @@ -68,15 +71,6 @@
'JES_down': path_to_files + 'JES_down/SingleMu' + middle + categories_and_prefixes['JES_down'] + '.root'
}

# unfolding_madgraph_file = path_to_files + 'unfolding_TTJets_7TeV_madgraph.root'
# unfolding_powheg = path_to_files + 'unfolding_TTJets_7TeV_powheg.root'
# unfolding_mcatnlo = path_to_files + 'unfolding_TTJets_7TeV_pythia.root'
#
# unfolding_scale_down = path_to_files + 'unfolding_TTJets_7TeV_scaledown.root'
# unfolding_scale_up = path_to_files + 'unfolding_TTJets_7TeV_scaleup.root'
# unfolding_matching_down = path_to_files + 'unfolding_TTJets_7TeV_matchingdown.root'
# unfolding_matching_up = path_to_files + 'unfolding_TTJets_7TeV_matchingup.root'

unfolding_output_general_template = path_to_unfolding_histograms + '%s.root'
unfolding_madgraph_file = path_to_unfolding_histograms + 'unfolding_merged.root'
unfolding_powheg = path_to_unfolding_histograms + 'unfolding_TTJets_7TeV_powheg.root'
Expand All @@ -87,12 +81,6 @@
unfolding_matching_down = path_to_unfolding_histograms + 'unfolding_TTJets_7TeV_matchingdown.root'
unfolding_matching_up = path_to_unfolding_histograms + 'unfolding_TTJets_7TeV_matchingup.root'

# histogram_path_templates = {
# 'MET' : 'TTbarPlusMetAnalysis/%s/Ref selection/BinnedMETAnalysis/%s_%s_bin_%s/%s_AbsEta',
# 'HT' : 'TTbarPlusMetAnalysis/%s/Ref selection/Binned_HT_Analysis/HT_bin_%s/%s_absolute_eta',
# 'ST': 'TTbarPlusMetAnalysis/%s/Ref selection/Binned_ST_Analysis/ST_with_%s_bin_%s/%s_absolute_eta',
# 'MT': 'TTbarPlusMetAnalysis/%s/Ref selection/Binned_MT_Analysis/MT_with_%s_bin_%s/%s_absolute_eta'
# }
histogram_path_templates = {
'MET' : 'TTbar_plus_X_analysis/%s/Ref selection/Binned_MET_Analysis/%s_bin_%s/%s_absolute_eta',
'HT' : 'TTbar_plus_X_analysis/%s/Ref selection/Binned_HT_Analysis/HT_bin_%s/%s_absolute_eta',
Expand Down
15 changes: 4 additions & 11 deletions config/cross_section_measurement_8TeV.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
path_to_files = '/storage/TopQuarkGroup/results/histogramfiles/AN-14-071_first_draft/8TeV/'
path_to_unfolding_histograms = path_to_files + '/unfolding/'

path_to_unfolding_ntuples = '/storage/TopQuarkGroup/mc/8TeV/NoSkimUnfolding/v10/' #for merging
new_luminosity = 19712 # pb-1
luminosity = 19584 # pb-1
luminosity_scale = float(new_luminosity)/float(luminosity)
Expand Down Expand Up @@ -42,6 +41,10 @@

categories_and_prefixes = {
'central':'',
'Electron_down':'_minusElectron',
'Electron_up':'_plusElectron',
'Muon_down':'_minusMuon',
'Muon_up':'_plusMuon',
'BJet_down':'_minusBJet',
'BJet_up':'_plusBjet',
'JES_down':'_minusJES',
Expand Down Expand Up @@ -70,16 +73,6 @@
'JES_down': path_to_files + 'JES_down/SingleMu' + middle + categories_and_prefixes['JES_down'] + '.root'
}

unfolding_input_templates = {'unfolding_merged': path_to_unfolding_ntuples + 'TTJets_MassiveBinDECAY_TuneZ2star_8TeV-madgraph-tauola/unfolding_v10_Summer12_DR53X-PU_S10_START53_V7C-v1_NoSkim/%s*.root',
'unfolding_TTJets_8TeV_mcatnlo': path_to_unfolding_ntuples + 'TT_8TeV-mcatnlo/unfolding_v10_Summer12_DR53X-PU_S10_START53_V7A-v1_NoSkim/%s*.root',
'unfolding_TTJets_8TeV_powheg': path_to_unfolding_ntuples + 'TT_CT10_TuneZ2star_8TeV-powheg-tauola/unfolding_v10_Summer12_DR53X-PU_S10_START53_V7A-v1_NoSkim/%s*.root',
'unfolding_TTJets_8TeV_matchingup': path_to_unfolding_ntuples + 'TTJets_matchingup_TuneZ2star_8TeV-madgraph-tauola/unfolding_v10_Summer12_DR53X-PU_S10_START53_V7A-v1_NoSkim/%s*.root',
'unfolding_TTJets_8TeV_matchingdown': path_to_unfolding_ntuples + 'TTJets_matchingdown_TuneZ2star_8TeV-madgraph-tauola/unfolding_v10_Summer12_DR53X-PU_S10_START53_V7A-v1_NoSkim/%s*.root',
'unfolding_TTJets_8TeV_scaleup': path_to_unfolding_ntuples + 'TTJets_scaleup_TuneZ2star_8TeV-madgraph-tauola/unfolding_v10_Summer12_DR53X-PU_S10_START53_V7A-v1_NoSkim/%s*.root',
'unfolding_TTJets_8TeV_scaledown': path_to_unfolding_ntuples + 'TTJets_scaledown_TuneZ2star_8TeV-madgraph-tauola/unfolding_v10_Summer12_DR53X-PU_S10_START53_V7A-v1_NoSkim/%s*.root'
}

unfolding_output_general_template = path_to_unfolding_histograms + '%s.root'
unfolding_madgraph_file = path_to_unfolding_histograms + 'unfolding_merged.root'
unfolding_powheg = path_to_unfolding_histograms + 'unfolding_TTJets_8TeV_powheg.root'
unfolding_mcatnlo = path_to_unfolding_histograms + 'unfolding_TTJets_8TeV_mcatnlo.root'
Expand Down
39 changes: 37 additions & 2 deletions config/summations_7TeV.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,11 +26,46 @@
diboson_samples = [ 'WWtoAnything', 'WZtoAnything', 'ZZtoAnything']
signal_samples = [ 'TTJet', 'SingleTop']

wplusjets_matchingup_samples = [ 'WJets-matchingup' ]
dyplusjets_matchingup_samples = [ 'ZJets-matchingup' ]
vplusjets_matchingup_samples = wplusjets_matchingup_samples + dyplusjets_matchingup_samples

wplusjets_matchingdown_samples = [ 'WJets-matchingdown' ]
dyplusjets_matchingdown_samples = [ 'ZJets-matchingdown' ]
vplusjets_matchingdown_samples = wplusjets_matchingdown_samples + dyplusjets_matchingdown_samples

wplusjets_scaledown_samples = [ 'WJets-scaledown' ]
dyplusjets_scaledown_samples = [ 'ZJets-scaledown' ]
vplusjets_scaledown_samples = wplusjets_scaledown_samples + dyplusjets_scaledown_samples

wplusjets_scaleup_samples = [ 'WJets-scaleup' ]
dyplusjets_scaleup_samples = [ 'ZJets-scaleup' ]
vplusjets_scaleup_samples = wplusjets_scaleup_samples + dyplusjets_scaleup_samples

ttjets_unfolding_samples = ['TTJets']
ttjets_mcatnlo_unfolding_samples = ['TTJets']
ttjets_powheg_unfolding_samples = ['TTJets']
ttjets_matchingup_unfolding_samples = ['TTJets-matchingup']
ttjets_matchingdown_unfolding_samples = ['TTJets-matchingdown']
ttjets_scaleup_unfolding_samples = ['TTJets-scaleup']
ttjets_scaledown_unfolding_samples = ['TTJets-scaledown']

sample_summations = {
'QCD_Electron':electron_qcd_samples,
'SingleTop' : singleTop_samples,
'WJets' : wplusjets_samples,
'VJets' : vplusjets_samples,
'DiBoson': diboson_samples,
'Signal': signal_samples
# 'DiBoson': diboson_samples,
'Signal': signal_samples,
'VJets_matchingup' : vplusjets_matchingup_samples,
'VJets_matchingdown' : vplusjets_matchingdown_samples,
'VJets_scaledown' : vplusjets_scaledown_samples,
'VJets_scaleup' : vplusjets_scaleup_samples,
'unfolding_merged' : ttjets_unfolding_samples,
'unfolding_TTJets_7TeV_mcatnlo' : ttjets_mcatnlo_unfolding_samples,
'unfolding_TTJets_7TeV_powheg' : ttjets_powheg_unfolding_samples,
'unfolding_TTJets_7TeV_matchingup' : ttjets_matchingup_unfolding_samples,
'unfolding_TTJets_7TeV_matchingdown' : ttjets_matchingdown_unfolding_samples,
'unfolding_TTJets_7TeV_scaleup' : ttjets_scaleup_unfolding_samples,
'unfolding_TTJets_7TeV_scaledown' : ttjets_scaledown_unfolding_samples,
}
35 changes: 33 additions & 2 deletions experimental/merge_samples_7TeV.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,24 +3,55 @@

from tools.file_utilities import merge_ROOT_files
import os
import subprocess
import time

new_files = []

#merge generator systematics histogram files and unfolding ntuples
for sample, input_samples in sample_summations.iteritems():
if not sample in ['WJets', 'VJets_matchingup', 'VJets_matchingdown', 'VJets_scaleup', 'VJets_scaledown']: # No 'DYJets' because there is only one inclusive DYJets dataset
continue
print "Merging"
output_file = measurement_config.central_general_template % sample
input_files = [measurement_config.central_general_template % input_sample for input_sample in input_samples]

print output_file
for input_file in input_files:
print input_file

if not os.path.exists(output_file):
merge_ROOT_files(input_files, output_file, compression = 7)
new_files.append(output_file)
print '='*120

# if 8 concurrent processes, wait until they are finished before starting the next set to avoid overloading the machine
while ( int( subprocess.check_output( "ps ax | grep 'hadd' | wc -l", shell = True ) ) - 2 ) >= 8:
time.sleep( 30 ) # sleep for 30 seconds


#merge all other histogram files
for category in measurement_config.categories_and_prefixes.keys():
for sample, input_samples in sample_summations.iteritems():
if not sample in ['VJets', 'SingleTop']:
if not sample in ['VJets', 'SingleTop', 'QCD_Electron']: # No QCD_Muon because there is only one MuEnriched QCD dataset
continue
print "Merging"
output_file = measurement_config.general_category_templates[category] % sample
print output_file
input_files = [measurement_config.general_category_templates[category] % input_sample for input_sample in input_samples]

print output_file
for input_file in input_files:
print input_file

if not os.path.exists(output_file):
merge_ROOT_files(input_files, output_file, compression = 7)
new_files.append(output_file)
print '='*120

#if 8 concurrent processes, wait until they are finished before starting the next set to avoid overloading the machine
while ( int( subprocess.check_output( "ps ax | grep 'hadd' | wc -l", shell = True ) ) - 2 ) >= 8:
time.sleep( 30 ) # sleep for 30 seconds

print '='*120
print 'Created:'
for f in new_files:
Expand Down
55 changes: 33 additions & 22 deletions experimental/merge_samples_8TeV.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,48 +3,59 @@

from tools.file_utilities import merge_ROOT_files
import os
import subprocess
import time

new_files = []

#merge generator systematics histogram files and unfolding ntuples
# merge generator systematics histogram files and unfolding ntuples
for sample, input_samples in sample_summations.iteritems():
if not sample in ['QCD_Electron', 'WJets', 'DYJets', 'VJets_matchingup', 'VJets_matchingdown', 'VJets_scaleup', 'VJets_scaledown', 'unfolding_merged', 'unfolding_TTJets_8TeV_mcatnlo', 'unfolding_TTJets_8TeV_powheg', 'unfolding_TTJets_8TeV_matchingup', 'unfolding_TTJets_8TeV_matchingdown', 'unfolding_TTJets_8TeV_scaleup', 'unfolding_TTJets_8TeV_scaledown']: #
if not sample in ['WJets', 'DYJets', 'VJets_matchingup',
'VJets_matchingdown', 'VJets_scaleup',
'VJets_scaledown']: #
continue
print "Merging"
if 'unfolding' in sample:
# print 'unfolding in sample'
output_file = measurement_config.unfolding_output_general_template % sample
input_files = [measurement_config.unfolding_input_templates[sample] % input_sample for input_sample in input_samples]
else: #if any (generator_systematic in sample for generator_systematic in measurement_config.generator_systematics):
# print 'generator systematic in sample'
output_file = measurement_config.central_general_template % sample
input_files = [measurement_config.central_general_template % input_sample for input_sample in input_samples]
output_file = measurement_config.central_general_template % sample
input_files = [measurement_config.central_general_template % input_sample for input_sample in input_samples]

print output_file
for input_file in input_files:
print input_file
if not os.path.exists(output_file):
merge_ROOT_files(input_files, output_file, compression = 7)
new_files.append(output_file)
print '='*120

if not os.path.exists( output_file ):
merge_ROOT_files( input_files, output_file, compression = 7 )
print "merging ", sample
new_files.append( output_file )
print '=' * 120

# if 8 concurrent processes, wait until they are finished before starting the next set to avoid overloading the machine
while ( int( subprocess.check_output( "ps ax | grep 'hadd' | wc -l", shell = True ) ) - 2 ) >= 8:
time.sleep( 30 ) # sleep for 30 seconds

#merge all other histogram files
# merge all other histogram files
for category in measurement_config.categories_and_prefixes.keys():
for sample, input_samples in sample_summations.iteritems():
if not sample in ['VJets', 'QCD_Muon', 'SingleTop']: #
if not sample in ['QCD_Electron', 'QCD_Muon', 'SingleTop', 'VJets']: #
continue
print "Merging"
output_file = measurement_config.general_category_templates[category] % sample
print output_file
input_files = [measurement_config.general_category_templates[category] % input_sample for input_sample in input_samples]

print output_file
for input_file in input_files:
print input_file
if not os.path.exists(output_file):
merge_ROOT_files(input_files, output_file, compression = 7)
new_files.append(output_file)
print '='*120

if not os.path.exists( output_file ):
merge_ROOT_files( input_files, output_file, compression = 7 )
print "merging ", category, " ", sample
new_files.append( output_file )
print '=' * 120

# if 8 concurrent processes, wait until they are finished before starting the next set to avoid overloading the machine
while ( int( subprocess.check_output( "ps ax | grep 'hadd' | wc -l", shell = True ) ) - 2 ) >= 8:
time.sleep( 30 ) # sleep for 30 seconds

print '='*120
print '=' * 120
print 'Created:'
for f in new_files:
print f
1 change: 1 addition & 0 deletions src/cross_section_measurement/01_get_fit_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from tools.Calculation import decombine_result, combine_complex_results
from tools.Fitting import TMinuitFit, RooFitFit
from tools.file_utilities import write_data_to_JSON
from tools.hist_utilities import hist_to_value_error_tuplelist

def get_histograms(channel, input_files, variable, met_type, variable_bin, b_tag_bin, rebin=1):
global b_tag_bin_VJets
Expand Down
Loading