diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000..a476530d --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,5 @@ +include bin/* +include *.py +include requirements/*.txt +include CONTRIBUTING.md +include README.md \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..74887eba --- /dev/null +++ b/Makefile @@ -0,0 +1,105 @@ +# simple makefile to simplify repetitive build env management tasks under posix + +PYTHON := $(shell which python) +NOSETESTS := $(shell which nosetests) + +INTERACTIVE := $(shell ([ -t 0 ] && echo 1) || echo 0) + +UNAME_S := $(shell uname -s) +PROJECT_NAME := dps + + +ifeq ($(UNAME_S),Darwin) + OPEN := open +else + OPEN := xdg-open +endif + +all: clean inplace test + +# list what would be deleted by clean-repo +clean-repo-check: + @git clean -f -x -d -n + +clean-dict: + @rm -f AutoDict_* + +clean-pyc: + @find . -name "*.pyc" -exec rm {} \; + +clean-so: + @find $(PROJECT_NAME) -name "*.so" -exec rm {} \; + +clean-build: + @rm -rf build + +clean-dist: + @rm -fr dist + @rm -fr $(PROJECT_NAME).egg-info + +clean: clean-build clean-pyc clean-so clean-dict clean-dist + +in: inplace # just a shortcut +inplace: + @$(PYTHON) setup.py build_ext -i + +install: clean + @$(PYTHON) setup.py install + +install-user: clean + @$(PYTHON) setup.py install --user + +sdist: clean + @$(PYTHON) setup.py sdist --release + +register: + @$(PYTHON) setup.py register --release + +upload: clean + @$(PYTHON) setup.py sdist upload --release + +test-code: inplace + @$(NOSETESTS) -v -a '!slow' -s tests + +test-code-full: inplace + @$(NOSETESTS) -v -s tests + +test-code-verbose: inplace + @$(NOSETESTS) -v -a '!slow' -s tests --nologcapture + +test-installed: + @(mkdir -p nose && cd nose && \ + $(NOSETESTS) -v -a '!slow' -s --exe tests && \ + cd .. 
&& rm -rf nose) + +test-doc: + @$(NOSETESTS) -v -s --with-doctest --doctest-tests --doctest-extension=rst \ + --doctest-extension=inc --doctest-fixtures=_fixture docs/ + +test-coverage: + @rm -rf coverage .coverage + @$(NOSETESTS) -s -v -a '!slow' --with-coverage \ + --cover-erase --cover-branches \ + --cover-html --cover-html-dir=coverage rootpy + @if [ "$(INTERACTIVE)" -eq "1" ]; then \ + $(OPEN) coverage/index.html; \ + fi; + +test: test-code + +trailing-spaces: + @find $(PROJECT_NAME) -name "*.py" | xargs perl -pi -e 's/[ \t]*$$//' + +doc: inplace + @make -C docs/ html + +check-rst: + @mkdir -p build + @$(PYTHON) setup.py --long-description | rst2html.py > build/README.html + @$(OPEN) build/README.html + +pep8: + @pep8 --exclude=.git,extern $(PROJECT_NAME) + +flakes: + @./ci/run-pyflakes diff --git a/bin/convert_unfolding b/bin/convert_unfolding index 153e4874..873fd6fe 100755 --- a/bin/convert_unfolding +++ b/bin/convert_unfolding @@ -4,9 +4,9 @@ # This is needed to speed up the unfolding process, as the difference # in read speed between fine-binned and asymmetric is a factor of 200! # TODO: create the combined histograms as well. 
-from src.cross_section_measurement.lib import convert_unfolding_histograms -from config import XSectionConfig -from tools.Timer import Timer +from dps.analysis.xsection.lib import convert_unfolding_histograms +from dps.config.xsection import XSectionConfig +from dps.utils.Timer import Timer from multiprocessing import Pool config_7TeV = XSectionConfig(7) diff --git a/bin/create_toy_mc_on_DICE b/bin/create_toy_mc_on_DICE index 323c724d..45ac5c4d 100755 --- a/bin/create_toy_mc_on_DICE +++ b/bin/create_toy_mc_on_DICE @@ -4,8 +4,8 @@ ''' from __future__ import print_function from optparse import OptionParser -from condor import job -from condor.jobtypes.create_toy_mc_from_tree_job import CreateToyMCFromTreeJob +from dps.condor import job +from dps.condor.jobtypes.create_toy_mc_from_tree_job import CreateToyMCFromTreeJob def main(): diff --git a/bin/create_unfolding_hists_on_DICE b/bin/create_unfolding_hists_on_DICE index 93ed0302..d057ccf7 100755 --- a/bin/create_unfolding_hists_on_DICE +++ b/bin/create_unfolding_hists_on_DICE @@ -4,9 +4,9 @@ ''' from __future__ import print_function from optparse import OptionParser -from config import XSectionConfig -from condor import job -from condor.jobtypes.produce_unfolding_hists_job import ProduceUnfoldingHistsJob +from dps.config.xsection import XSectionConfig +from dps.condor import job +from dps.condor.jobtypes.produce_unfolding_hists_job import ProduceUnfoldingHistsJob import os import sys diff --git a/bin/create_unfolding_pulls_on_DICE b/bin/create_unfolding_pulls_on_DICE index c560001c..a49cc894 100755 --- a/bin/create_unfolding_pulls_on_DICE +++ b/bin/create_unfolding_pulls_on_DICE @@ -4,12 +4,10 @@ ''' from __future__ import print_function from optparse import OptionParser -from config import XSectionConfig -from condor import job -from condor.jobtypes.unfolding_pull_job_new import UnfoldingPullJob +from dps.config.xsection import XSectionConfig +from dps.condor import job +from 
dps.condor.jobtypes.unfolding_pull_job_new import UnfoldingPullJob from math import log10 -import os -import sys def get_tau_values(min, max, spacing): taus = [] diff --git a/bin/dice_01 b/bin/dice_01 index 2c843f68..355fa4fe 100755 --- a/bin/dice_01 +++ b/bin/dice_01 @@ -1,10 +1,10 @@ #!/bin/bash echo "Recreating Config Files (Just in case you forgot...)" -python src/cross_section_measurement/create_measurement.py +python dps/analysis/xsection/create_measurement.py echo "Tarring DailyPythonScripts..." tar -cf dps.tar ../DailyPythonScripts/ --exclude ../DailyPythonScripts/data --exclude ../DailyPythonScripts/plots --exclude ../DailyPythonScripts/tables --exclude ../DailyPythonScripts/jobs --exclude ../DailyPythonScripts/unfolding --exclude ../DailyPythonScripts/dps.tar echo "Submitting jobs to DICE..." -condor_submit experimental/condor/01b/01_fit.description +condor_submit dps/experimental/condor/01b/01_fit.description echo "Done." condor_q `whoami` diff --git a/bin/dice_bltUnfold b/bin/dice_bltUnfold index 4d7c8f20..0cbb152f 100755 --- a/bin/dice_bltUnfold +++ b/bin/dice_bltUnfold @@ -1,7 +1,7 @@ #!/bin/bash -./condor/prepare_dps.sh +dps/condor/prepare_dps.sh echo "Submitting jobs to DICE..." -condor_submit src/BLTUnfold/submitBLTUnfold.description +condor_submit dps/analysis/BLTUnfold/submitBLTUnfold.description echo "Done." 
condor_q `whoami` diff --git a/bin/do_pulls_workflow b/bin/do_pulls_workflow index 955c1108..05b448ca 100755 --- a/bin/do_pulls_workflow +++ b/bin/do_pulls_workflow @@ -46,7 +46,7 @@ echo "Doing unfolding pulls for: $var" echo "Producing pull data for $var variable, $c channel, kv=$k, sqrt(s) = $energy TeV" for j in 1 2 3 4 5 6 7 8 9; do echo "Doing part $j out of 9" - nohup time python src/unfolding_tests/create_unfolding_pull_data.py -v $var -k $k -f data/toy_mc/toy_mc_${var}_N_300_${energy}TeV.root -n 35 ${options[$j-1]} -c $c -s $energy &> logs/pull_${var}_kv${k}_${c}_${j}_${energy}TeV.log & + nohup time python dps/analysis/unfolding_tests/create_unfolding_pull_data.py -v $var -k $k -f data/toy_mc/toy_mc_${var}_N_300_${energy}TeV.root -n 35 ${options[$j-1]} -c $c -s $energy &> logs/pull_${var}_kv${k}_${c}_${j}_${energy}TeV.log & let i+=1 if (( $i % N_JOBS == 0 )) then @@ -54,7 +54,7 @@ echo "Doing unfolding pulls for: $var" wait; fi done - nohup time python src/unfolding_tests/make_unfolding_pull_plots.py -i data/pull_data/7TeV/$var/35_input_toy_mc/k_value_$k -c $c -v $var -k $k -s $energy &> logs/pull_${var}_kv${k}_${c}_plots_${energy}TeV.log & + nohup time python dps/analysis/unfolding_tests/make_unfolding_pull_plots.py -i data/pull_data/7TeV/$var/35_input_toy_mc/k_value_$k -c $c -v $var -k $k -s $energy &> logs/pull_${var}_kv${k}_${c}_plots_${energy}TeV.log & wait; done done diff --git a/bin/ntuple_info b/bin/ntuple_info index c5fc33da..ca8f9a1b 100755 --- a/bin/ntuple_info +++ b/bin/ntuple_info @@ -5,7 +5,7 @@ from prettytable import PrettyTable from rootpy.io import root_open -from tools.NTuple import FileInfo +from dps.utils.NTuple import FileInfo def main(): options, args = get_parameters() diff --git a/bin/plot b/bin/plot index 60240453..6158a31a 100755 --- a/bin/plot +++ b/bin/plot @@ -80,9 +80,9 @@ Example JSON config structure: from optparse import OptionParser import sys from os.path import exists -from tools.ROOT_utils import set_root_defaults 
-from tools.file_utilities import write_data_to_JSON, read_data_from_JSON -from tools.HistSet import HistSet +from dps.utils.ROOT_utils import set_root_defaults +from dps.utils.file_utilities import write_data_to_JSON, read_data_from_JSON +from dps.utils.HistSet import HistSet from copy import deepcopy supported_commands = ['compare-files', 'compare-hists'] diff --git a/bin/plot_fit_tests b/bin/plot_fit_tests index 6f4c572f..3261e516 100755 --- a/bin/plot_fit_tests +++ b/bin/plot_fit_tests @@ -2,15 +2,15 @@ echo "Run this only after run_fit_tests" echo "This will take a while ... grab a coffee/tea/water." mkdir -p logs -nohup python src/cross_section_measurement/98b_fit_cross_checks.py -p data/closure_test/ -o 'plots/fitchecks/closure_test' &> logs/plot_fit_checks_closure_test.log & -nohup python src/cross_section_measurement/98b_fit_cross_checks.py -p data/no_merging/closure_test/ -o 'plots/fitchecks/no_merging/closure_test' &> logs/plot_fit_checks_closure_test_no_merging.log & -nohup python src/cross_section_measurement/98b_fit_cross_checks.py -p data/no_constraints/closure_test/ -o 'plots/fitchecks/no_constraints/closure_test' &> logs/plot_fit_checks_closure_test_no_constraints.log & -nohup python src/cross_section_measurement/98b_fit_cross_checks.py -p data/no_constraints_no_merging/closure_test/ -o 'plots/fitchecks/no_constraints_no_merging/closure_test' &> logs/plot_fit_checks_closure_test_NCNM.log & +nohup python dps/analysis/xsection/98b_fit_cross_checks.py -p data/closure_test/ -o 'plots/fitchecks/closure_test' &> logs/plot_fit_checks_closure_test.log & +nohup python dps/analysis/xsection/98b_fit_cross_checks.py -p data/no_merging/closure_test/ -o 'plots/fitchecks/no_merging/closure_test' &> logs/plot_fit_checks_closure_test_no_merging.log & +nohup python dps/analysis/xsection/98b_fit_cross_checks.py -p data/no_constraints/closure_test/ -o 'plots/fitchecks/no_constraints/closure_test' &> logs/plot_fit_checks_closure_test_no_constraints.log & +nohup python 
dps/analysis/xsection/98b_fit_cross_checks.py -p data/no_constraints_no_merging/closure_test/ -o 'plots/fitchecks/no_constraints_no_merging/closure_test' &> logs/plot_fit_checks_closure_test_NCNM.log & wait -nohup python src/cross_section_measurement/98b_fit_cross_checks.py -p data/ -o 'plots/fitchecks/' &> logs/plot_fit_checks.log & -nohup python src/cross_section_measurement/98b_fit_cross_checks.py -p data/no_merging/ -o 'plots/fitchecks/no_merging/' &> logs/plot_fit_checks_no_merging.log & -nohup python src/cross_section_measurement/98b_fit_cross_checks.py -p data/no_constraints/ -o 'plots/fitchecks/no_constraints/' &> logs/plot_fit_checks_no_constraints.log & -nohup python src/cross_section_measurement/98b_fit_cross_checks.py -p data/no_constraints_no_merging/ -o 'plots/fitchecks/no_constraints_no_merging/' &> logs/plot_fit_checks_NCNM.log & +nohup python dps/analysis/xsection/98b_fit_cross_checks.py -p data/ -o 'plots/fitchecks/' &> logs/plot_fit_checks.log & +nohup python dps/analysis/xsection/98b_fit_cross_checks.py -p data/no_merging/ -o 'plots/fitchecks/no_merging/' &> logs/plot_fit_checks_no_merging.log & +nohup python dps/analysis/xsection/98b_fit_cross_checks.py -p data/no_constraints/ -o 'plots/fitchecks/no_constraints/' &> logs/plot_fit_checks_no_constraints.log & +nohup python dps/analysis/xsection/98b_fit_cross_checks.py -p data/no_constraints_no_merging/ -o 'plots/fitchecks/no_constraints_no_merging/' &> logs/plot_fit_checks_NCNM.log & wait DATE=`date +%d.%m.%Y_%H.%M.%S` tar -czf fit_check_plots_$DATE.tar.gz plots/fitchecks diff --git a/bin/qcd_from_data b/bin/qcd_from_data index a34070e2..5d3cc59c 100755 --- a/bin/qcd_from_data +++ b/bin/qcd_from_data @@ -48,9 +48,10 @@ Uses the data_file to extract the templates, removes other samples from ROOT import gROOT gcd = gROOT.cd from optparse import OptionParser -from tools.file_utilities import write_data_to_JSON, read_data_from_JSON -from tools.ROOT_utililities import root_mkdir, find_btag, 
get_histogram_dictionary -from tools.hist_utilities import clean_control_region +from dps.utils.file_utilities import read_data_from_JSON +# @BROKEN +from dps.utils.ROOT_utililities import root_mkdir, find_btag, get_histogram_dictionary +from dps.utils.hist_utilities import clean_control_region from rootpy.io import root_open def main(): diff --git a/bin/run_fit_tests b/bin/run_fit_tests index fd42c231..7944f194 100755 --- a/bin/run_fit_tests +++ b/bin/run_fit_tests @@ -5,11 +5,11 @@ for fitComb in absolute_eta M_bl M3 angle_bl absolute_eta,M_bl absolute_eta,M3 a for var in MET HT ST WPT MT; do nicevar=`echo $var | sed 's/,/_/g'` echo "Doing variable set: $fitComb, $nicevar" - nohup time python src/cross_section_measurement/01_get_fit_results.py --fit-variables $fitComb -v $var --no_combined_signal --closure_test &> logs/fit_closure_test_$nicevar.log & - nohup time python src/cross_section_measurement/01_get_fit_results.py --fit-variables $fitComb -v $var --no_combined_signal --closure_test --closure_test_type='qcd_only' &> logs/fit_closure_test_${nicevar}_qcd.log & - nohup time python src/cross_section_measurement/01_get_fit_results.py --fit-variables $fitComb -v $var --no_combined_signal --closure_test --closure_test_type='vjets_only' &> logs/fit_closure_test_${nicevar}_vjets.log & - nohup time python src/cross_section_measurement/01_get_fit_results.py --fit-variables $fitComb -v $var --no_combined_signal --closure_test --closure_test_type='ttbar_only' &> logs/fit_closure_test_${nicevar}_ttbar.log & - nohup time python src/cross_section_measurement/01_get_fit_results.py --fit-variables $fitComb -v $var --no_combined_signal --closure_test --closure_test_type='singletop_only' &> logs/fit_closure_test_${nicevar}_singletop.log & + nohup time python dps/analysis/xsection/01_get_fit_results.py --fit-variables $fitComb -v $var --no_combined_signal --closure_test &> logs/fit_closure_test_$nicevar.log & + nohup time python dps/analysis/xsection/01_get_fit_results.py 
--fit-variables $fitComb -v $var --no_combined_signal --closure_test --closure_test_type='qcd_only' &> logs/fit_closure_test_${nicevar}_qcd.log & + nohup time python dps/analysis/xsection/01_get_fit_results.py --fit-variables $fitComb -v $var --no_combined_signal --closure_test --closure_test_type='vjets_only' &> logs/fit_closure_test_${nicevar}_vjets.log & + nohup time python dps/analysis/xsection/01_get_fit_results.py --fit-variables $fitComb -v $var --no_combined_signal --closure_test --closure_test_type='ttbar_only' &> logs/fit_closure_test_${nicevar}_ttbar.log & + nohup time python dps/analysis/xsection/01_get_fit_results.py --fit-variables $fitComb -v $var --no_combined_signal --closure_test --closure_test_type='singletop_only' &> logs/fit_closure_test_${nicevar}_singletop.log & wait; done done diff --git a/bin/run_unfolding_tests b/bin/run_unfolding_tests index 4b14d316..5b5f9e7b 100755 --- a/bin/run_unfolding_tests +++ b/bin/run_unfolding_tests @@ -3,10 +3,10 @@ echo "This will take a while ... 
grab a coffee/tea/water" mkdir -p logs for CoM in 7 8; do echo "Doing centre of mass energy: $CoM TeV" - nohup time python src/unfolding_tests/k_value_determination.py -c $CoM &> logs/k_value_determination_${CoM}TeV.log & - nohup time python src/unfolding_tests/k_value_optimisation_plots.py -c $CoM &> logs/k_value_optmisation_${CoM}TeV.log & - nohup time python src/unfolding_tests/compare_unfolding_parameters.py -c $CoM -t closure &> logs/compare_unfolding_closure_${CoM}TeV.log & - nohup time python src/unfolding_tests/compare_unfolding_parameters.py -c $CoM -t data &> logs/compare_unfolding_data_${CoM}TeV.log & + nohup time python dps/analysis/unfolding_tests/k_value_determination.py -c $CoM &> logs/k_value_determination_${CoM}TeV.log & + nohup time python dps/analysis/unfolding_tests/k_value_optimisation_plots.py -c $CoM &> logs/k_value_optmisation_${CoM}TeV.log & + nohup time python dps/analysis/unfolding_tests/compare_unfolding_parameters.py -c $CoM -t closure &> logs/compare_unfolding_closure_${CoM}TeV.log & + nohup time python dps/analysis/unfolding_tests/compare_unfolding_parameters.py -c $CoM -t data &> logs/compare_unfolding_data_${CoM}TeV.log & wait; done diff --git a/bin/x_01_all_vars b/bin/x_01_all_vars index bc999e9d..f398a7c5 100755 --- a/bin/x_01_all_vars +++ b/bin/x_01_all_vars @@ -10,7 +10,7 @@ echo "Using the fit variable(s): $fit_var" i=0 for var in MET HT ST WPT MT lepTopPt lepTopRap hadTopPt hadTopRap ttbarPt ttbarM ttbarRap; do echo "Fitting distribution: $var" - nohup time python src/cross_section_measurement/01_get_fit_results.py -V -v $var --fit-variables $fit_var &> logs/01_${var}_fit_13TeV_${nice_fit_var}.log & + nohup time python dps/analysis/xsection/01_get_fit_results.py -V -v $var --fit-variables $fit_var &> logs/01_${var}_fit_13TeV_${nice_fit_var}.log & let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/bin/x_01b_all_vars b/bin/x_01b_all_vars index acbd955e..c191c3c1 100755 --- a/bin/x_01b_all_vars +++ b/bin/x_01b_all_vars @@ -7,7 
+7,7 @@ N_JOBS=4 i=0 for var in MET HT ST WPT lepton_pt abs_lepton_eta NJets; do echo "Fitting distribution: $var" - nohup time python src/cross_section_measurement/01_get_ttjet_normalisation.py -v $var -i config/measurements/background_subtraction &> logs/01_${var}_bgs_13TeV_fullPS.log & + nohup time python dps/analysis/xsection/01_get_ttjet_normalisation.py -v $var -i config/measurements/background_subtraction &> logs/01_${var}_bgs_13TeV_fullPS.log & let i+=1 if (( $i % N_JOBS == 0 )) then @@ -22,7 +22,7 @@ for var in MET HT ST WPT lepton_pt abs_lepton_eta NJets; do # since we use the same bins in both full and visible phase space, the normalisation result will be identical mkdir -p data/normalisation/background_subtraction/13TeV/$var/VisiblePS cp -r data/normalisation/background_subtraction/13TeV/$var/FullPS/* data/normalisation/background_subtraction/13TeV/$var/VisiblePS/. - #nohup time python src/cross_section_measurement/01_get_ttjet_normalisation.py -v $var -i config/measurements/background_subtraction --visiblePS &> logs/01_${var}_bgs_13TeV_visiblePS.log & + #nohup time python dps/analysis/xsection/01_get_ttjet_normalisation.py -v $var -i config/measurements/background_subtraction --visiblePS &> logs/01_${var}_bgs_13TeV_visiblePS.log & let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/bin/x_02_all_vars b/bin/x_02_all_vars index d6e2c4e3..c350a291 100755 --- a/bin/x_02_all_vars +++ b/bin/x_02_all_vars @@ -10,7 +10,7 @@ echo "Using the fit variable(s): $fit_var" i=0 for var in MET HT ST WPT MT lepTopPt lepTopRap hadTopPt hadTopRap ttbarPt ttbarM ttbarRap; do echo "Unfolding distribution: $var" - nohup time python src/cross_section_measurement/02_unfold_and_measure.py -v $var -c 13 -p data/$nice_fit_var &> logs/02_${var}_unfold_13TeV_${nice_fit_var}.log & + nohup time python dps/analysis/xsection/02_unfold_and_measure.py -v $var -c 13 -p data/$nice_fit_var &> logs/02_${var}_unfold_13TeV_${nice_fit_var}.log & let i+=1 if (( $i % N_JOBS == 0 )) then @@ -22,7 
+22,7 @@ done echo "Visible phase space" for var in MET HT ST WPT MT lepTopPt lepTopRap hadTopPt hadTopRap ttbarPt ttbarM ttbarRap; do echo "Unfolding distribution: $var" - nohup time python src/cross_section_measurement/02_unfold_and_measure.py --visiblePS -v $var -c 13 -p data/$nice_fit_var &> logs/02_${var}_unfold_13TeV_${nice_fit_var}.log & + nohup time python dps/analysis/xsection/02_unfold_and_measure.py --visiblePS -v $var -c 13 -p data/$nice_fit_var &> logs/02_${var}_unfold_13TeV_${nice_fit_var}.log & let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/bin/x_02b_all_vars b/bin/x_02b_all_vars index 97a66255..c3823edc 100755 --- a/bin/x_02b_all_vars +++ b/bin/x_02b_all_vars @@ -18,7 +18,7 @@ i=0 echo "Visible phase space" for var in MET HT ST WPT lepton_pt abs_lepton_eta NJets; do echo "Unfolding distribution: $var" - nohup time python src/cross_section_measurement/02_unfold_and_measure.py --visiblePS -v $var -c 13 -p data/normalisation/background_subtraction/ &> logs/02_${var}_Vis_unfold_13TeV.log & + nohup time python dps/analysis/xsection/02_unfold_and_measure.py --visiblePS -v $var -c 13 -p data/normalisation/background_subtraction/ &> logs/02_${var}_Vis_unfold_13TeV.log & let i+=1 shallIwait $i $N_JOBS done diff --git a/bin/x_03_all_vars b/bin/x_03_all_vars index 1f67506a..bd7e16a6 100755 --- a/bin/x_03_all_vars +++ b/bin/x_03_all_vars @@ -10,7 +10,7 @@ echo "Using the fit variable(s): $fit_var" i=0 for var in MET HT ST WPT MT lepTopPt lepTopRap hadTopPt hadTopRap ttbarPt ttbarM ttbarRap; do echo "Calculating diff. 
x-section for distribution: $var" - nohup time python src/cross_section_measurement/03_calculate_systematics.py -s -v $var -c 13 -p data/$nice_fit_var &> logs/03_${var}_calculate_13TeV_${nice_fit_var}.log & + nohup time python dps/analysis/xsection/03_calculate_systematics.py -s -v $var -c 13 -p data/$nice_fit_var &> logs/03_${var}_calculate_13TeV_${nice_fit_var}.log & let i+=1 if (( $i % N_JOBS == 0 )) then @@ -22,7 +22,7 @@ done echo "Visible phase space" for var in MET HT ST WPT MT lepTopPt lepTopRap hadTopPt hadTopRap ttbarPt ttbarM ttbarRap; do echo "Calculating diff. x-section for distribution: $var" - nohup time python src/cross_section_measurement/03_calculate_systematics.py --visiblePS -s -v $var -c 13 -p data/$nice_fit_var &> logs/03_${var}_calculate_13TeV_${nice_fit_var}.log & + nohup time python dps/analysis/xsection/03_calculate_systematics.py --visiblePS -s -v $var -c 13 -p data/$nice_fit_var &> logs/03_${var}_calculate_13TeV_${nice_fit_var}.log & let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/bin/x_03b_all_vars b/bin/x_03b_all_vars index 946528ec..468d5d6e 100755 --- a/bin/x_03b_all_vars +++ b/bin/x_03b_all_vars @@ -15,7 +15,7 @@ i=0 echo "Visible phase space" for var in MET HT ST WPT lepton_pt abs_lepton_eta NJets; do echo "Calculating diff. 
x-section for distribution: $var" - nohup time python src/cross_section_measurement/03_calculate_systematics.py --visiblePS -s -v $var -c 13 -u TUnfold -p data/normalisation/background_subtraction/ &> logs/03_${var}_calculate_Vis_13TeV.log & + nohup time python dps/analysis/xsection/03_calculate_systematics.py --visiblePS -s -v $var -c 13 -u TUnfold -p data/normalisation/background_subtraction/ &> logs/03_${var}_calculate_Vis_13TeV.log & let i+=1 shallIwait $i $N_JOBS done diff --git a/bin/x_04_all_vars b/bin/x_04_all_vars index 5caf24e8..2cbdbd9e 100755 --- a/bin/x_04_all_vars +++ b/bin/x_04_all_vars @@ -11,7 +11,7 @@ echo "Using the fit variable(s): $fit_var" i=0 for var in MET HT ST WPT MT lepTopPt lepTopRap hadTopPt hadTopRap ttbarPt ttbarM ttbarRap; do echo "Plotting diff. x-section for distribution: $var" - nohup time python src/cross_section_measurement/04_make_plots_matplotlib.py -v $var -c 13 -p data/$nice_fit_var &> logs/04_${var}_plot_13TeV_${nice_fit_var}.log & # -a <--add this option for additional plots + nohup time python dps/analysis/xsection/04_make_plots_matplotlib.py -v $var -c 13 -p data/$nice_fit_var &> logs/04_${var}_plot_13TeV_${nice_fit_var}.log & # -a <--add this option for additional plots let i+=1 if (( $i % N_JOBS == 0 )) then @@ -23,7 +23,7 @@ done echo "Visible phase space" for var in MET HT ST WPT MT lepTopPt lepTopRap hadTopPt hadTopRap ttbarPt ttbarM ttbarRap; do echo "Plotting diff. 
x-section for distribution: $var" - nohup time python src/cross_section_measurement/04_make_plots_matplotlib.py --visiblePS -v $var -c 13 -p data/$nice_fit_var &> logs/04_${var}_plot_13TeV_${nice_fit_var}.log & # -a <--add this option for additional plots + nohup time python dps/analysis/xsection/04_make_plots_matplotlib.py --visiblePS -v $var -c 13 -p data/$nice_fit_var &> logs/04_${var}_plot_13TeV_${nice_fit_var}.log & # -a <--add this option for additional plots let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/bin/x_04b_all_vars b/bin/x_04b_all_vars index 86d26068..aa8a8380 100755 --- a/bin/x_04b_all_vars +++ b/bin/x_04b_all_vars @@ -15,7 +15,7 @@ i=0 echo "Visible phase space" for var in MET HT ST WPT lepton_pt abs_lepton_eta NJets; do echo "Plotting diff. x-section for distribution: $var" - nohup time python src/cross_section_measurement/04_make_plots_matplotlib.py --visiblePS --show-generator-ratio -v $var -c 13 -p data/normalisation/background_subtraction/ -o plots/background_subtraction &> logs/04_${var}_plot_Vis_13TeV.log & # -a <--add this option for additional plots + nohup time python dps/analysis/xsection/04_make_plots_matplotlib.py --visiblePS --show-generator-ratio -v $var -c 13 -p data/normalisation/background_subtraction/ -o plots/background_subtraction &> logs/04_${var}_plot_Vis_13TeV.log & # -a <--add this option for additional plots let i+=1 shallIwait $i $N_JOBS done diff --git a/bin/x_05_all_vars b/bin/x_05_all_vars index ef94755f..27e2a8fd 100755 --- a/bin/x_05_all_vars +++ b/bin/x_05_all_vars @@ -29,7 +29,7 @@ echo "Visible phase space ..." i=0 for var in MET HT ST WPT lepton_pt abs_lepton_eta NJets; do echo "Tabulating diff. 
x-section for distribution: $var" - nohup time python src/cross_section_measurement/05_make_tables.py -v $var -c 13 -p $input_folder -o $output_folder -a --visiblePS &> logs/05_${var}_table_13TeV_vis.log & + nohup time python dps/analysis/xsection/05_make_tables.py -v $var -c 13 -p $input_folder -o $output_folder -a --visiblePS &> logs/05_${var}_table_13TeV_vis.log & let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/bin/x_05b_all_vars b/bin/x_05b_all_vars index d8ebd9aa..1e5b5875 100755 --- a/bin/x_05b_all_vars +++ b/bin/x_05b_all_vars @@ -7,7 +7,7 @@ N_JOBS=4 i=0 # for var in MET HT ST WPT lepton_pt abs_lepton_eta NJets; do # echo "Tabulating diff. x-section for distribution: $var" -# nohup time python src/cross_section_measurement/05_make_tables.py -v $var -c 13 -p /hdfs/TopQuarkGroup/run2/dpsData/data/normalisation/background_subtraction/ -a -o tables/background_subtraction &> logs/05_${var}_table_13TeV.log & +# nohup time python dps/analysis/xsection/05_make_tables.py -v $var -c 13 -p /hdfs/TopQuarkGroup/run2/dpsData/data/normalisation/background_subtraction/ -a -o tables/background_subtraction &> logs/05_${var}_table_13TeV.log & # let i+=1 # if (( $i % N_JOBS == 0 )) # then @@ -20,7 +20,7 @@ echo "Now visible phase space" for var in MET HT ST WPT lepton_pt abs_lepton_eta NJets; do echo "Tabulating diff. 
x-section for distribution: $var" - nohup time python src/cross_section_measurement/05_make_tables.py -v $var -c 13 -p data/normalisation/background_subtraction/ -a --visiblePS -o tables/background_subtraction &> logs/05_${var}_table_13TeV.log & + nohup time python dps/analysis/xsection/05_make_tables.py -v $var -c 13 -p data/normalisation/background_subtraction/ -a --visiblePS -o tables/background_subtraction &> logs/05_${var}_table_13TeV.log & let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/bin/x_98_all_vars b/bin/x_98_all_vars index fcd366af..013358cb 100755 --- a/bin/x_98_all_vars +++ b/bin/x_98_all_vars @@ -11,7 +11,7 @@ echo "Using the fit variable(s): $fit_var" i=0 for var in MET HT ST WPT MT; do echo "Plotting fit correlations for distribution: $var" - nohup time python src/cross_section_measurement/98_fit_cross_checks.py -v $var -e 7 --fit-variables $fit_var &> logs/98_${var}_fit_cross_checks_7TeV_${nice_fit_var}.log & + nohup time python dps/analysis/xsection/98_fit_cross_checks.py -v $var -e 7 --fit-variables $fit_var &> logs/98_${var}_fit_cross_checks_7TeV_${nice_fit_var}.log & let i+=1 if (( $i % N_JOBS == 0 )) then @@ -19,7 +19,7 @@ for var in MET HT ST WPT MT; do wait; fi - nohup time python src/cross_section_measurement/98_fit_cross_checks.py -v $var -e 8 --fit-variables $fit_var &> logs/98_${var}_fit_cross_checks_8TeV_${nice_fit_var}.log & + nohup time python dps/analysis/xsection/98_fit_cross_checks.py -v $var -e 8 --fit-variables $fit_var &> logs/98_${var}_fit_cross_checks_8TeV_${nice_fit_var}.log & let i+=1 if (( $i % N_JOBS == 0 )) then @@ -35,9 +35,9 @@ echo "98_fit_cross_checks all done!" 
#i=0 #for var in MET HT ST WPT MT; do # echo "Doing 98b cross checks for distribution: $var" -# nohup time python src/cross_section_measurement/98b_fit_cross_checks.py -v $var -e 7 &> logs/98b_${var}_fit_cross_checks_7TeV_${nice_fit_var}.log & +# nohup time python dps/analysis/xsection/98b_fit_cross_checks.py -v $var -e 7 &> logs/98b_${var}_fit_cross_checks_7TeV_${nice_fit_var}.log & # let i+=1 -# nohup time python src/cross_section_measurement/98b_fit_cross_checks.py -v $var -e 8 &> logs/98b_${var}_fit_cross_checks_8TeV_${nice_fit_var}.log & +# nohup time python dps/analysis/xsection/98b_fit_cross_checks.py -v $var -e 8 &> logs/98b_${var}_fit_cross_checks_8TeV_${nice_fit_var}.log & # let i+=1 # if (( $i % N_JOBS == 0 )) # then @@ -51,9 +51,9 @@ echo "98_fit_cross_checks all done!" #i=0 #for var in MET HT ST WPT MT; do # echo "Doing 98c cross checks for distribution: $var" -# nohup time python src/cross_section_measurement/98c_fit_cross_checks.py -v $var -e 7 &> logs/98c_${var}_fit_cross_checks_7TeV_${nice_fit_var}.log & +# nohup time python dps/analysis/xsection/98c_fit_cross_checks.py -v $var -e 7 &> logs/98c_${var}_fit_cross_checks_7TeV_${nice_fit_var}.log & # let i+=1 -# nohup time python src/cross_section_measurement/98c_fit_cross_checks.py -v $var -e 8 &> logs/98c_${var}_fit_cross_checks_8TeV_${nice_fit_var}.log & +# nohup time python dps/analysis/xsection/98c_fit_cross_checks.py -v $var -e 8 &> logs/98c_${var}_fit_cross_checks_8TeV_${nice_fit_var}.log & # let i+=1 # if (( $i % N_JOBS == 0 )) # then diff --git a/bin/x_99_QCD_cross_checks b/bin/x_99_QCD_cross_checks index 61100624..8e4263c2 100755 --- a/bin/x_99_QCD_cross_checks +++ b/bin/x_99_QCD_cross_checks @@ -8,7 +8,7 @@ N_JOBS=6 i=0 echo "Making 99_QCD_cross_checks plots" -nohup time python src/cross_section_measurement/99_QCD_cross_checks.py -e 7 &> logs/99_QCD_cross_checks_7TeV.log & +nohup time python dps/analysis/xsection/99_QCD_cross_checks.py -e 7 &> logs/99_QCD_cross_checks_7TeV.log & let i+=1 if 
(( $i % N_JOBS == 0 )) then @@ -16,7 +16,7 @@ then wait; fi -nohup time python src/cross_section_measurement/99_QCD_cross_checks.py -e 8 &> logs/99_QCD_cross_checks_8TeV.log & +nohup time python dps/analysis/xsection/99_QCD_cross_checks.py -e 8 &> logs/99_QCD_cross_checks_8TeV.log & let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/bin/x_make_binning_plots b/bin/x_make_binning_plots index 77dd2db3..8305fa06 100755 --- a/bin/x_make_binning_plots +++ b/bin/x_make_binning_plots @@ -8,7 +8,7 @@ N_JOBS=6 i=0 echo "Making binning plots" -nohup time python src/cross_section_measurement/make_binning_plots.py -c 7 &> logs/make_binning_plots_7TeV.log & +nohup time python dps/analysis/xsection/make_binning_plots.py -c 7 &> logs/make_binning_plots_7TeV.log & let i+=1 if (( $i % N_JOBS == 0 )) then @@ -16,7 +16,7 @@ then wait; fi -nohup time python src/cross_section_measurement/make_binning_plots.py -c 8 &> logs/make_binning_plots_8TeV.log & +nohup time python dps/analysis/xsection/make_binning_plots.py -c 8 &> logs/make_binning_plots_8TeV.log & let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/bin/x_make_control_plots b/bin/x_make_control_plots index 1cc5864f..c48c5034 100755 --- a/bin/x_make_control_plots +++ b/bin/x_make_control_plots @@ -8,8 +8,8 @@ N_JOBS=6 i=0 echo "Making control plots" -nohup time python src/cross_section_measurement/make_control_plots.py -c 7 &> logs/make_control_plots_7TeV.log & -nohup time python src/cross_section_measurement/make_control_plots.py -c 7 -n &> logs/make_control_plots_normalised_to_fit_7TeV.log & +nohup time python dps/analysis/xsection/make_control_plots.py -c 7 &> logs/make_control_plots_7TeV.log & +nohup time python dps/analysis/xsection/make_control_plots.py -c 7 -n &> logs/make_control_plots_normalised_to_fit_7TeV.log & let i+=1 if (( $i % N_JOBS == 0 )) then @@ -17,8 +17,8 @@ then wait; fi -nohup time python src/cross_section_measurement/make_control_plots.py -c 8 &> logs/make_control_plots_8TeV.log & -nohup time python 
src/cross_section_measurement/make_control_plots.py -c 8 -n &> logs/make_control_plots_normalised_to_fit_8TeV.log & +nohup time python dps/analysis/xsection/make_control_plots.py -c 8 &> logs/make_control_plots_8TeV.log & +nohup time python dps/analysis/xsection/make_control_plots.py -c 8 -n &> logs/make_control_plots_normalised_to_fit_8TeV.log & let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/bin/x_make_fit_variable_plots b/bin/x_make_fit_variable_plots index 9602f47f..2b664794 100755 --- a/bin/x_make_fit_variable_plots +++ b/bin/x_make_fit_variable_plots @@ -8,9 +8,9 @@ N_JOBS=6 i=0 echo "Making fit variable plots" -nohup time python src/cross_section_measurement/make_fit_variable_plots.py -c 7 &> logs/make_fit_variable_plots_7TeV.log & +nohup time python dps/analysis/xsection/make_fit_variable_plots.py -c 7 &> logs/make_fit_variable_plots_7TeV.log & let i+=1 -nohup time python src/cross_section_measurement/make_fit_variable_plots.py -c 8 &> logs/make_fit_variable_plots_8TeV.log & +nohup time python dps/analysis/xsection/make_fit_variable_plots.py -c 8 &> logs/make_fit_variable_plots_8TeV.log & let i+=1 if (( $i % N_JOBS == 0 )) then diff --git a/config/tests/__init__.py b/ci/pyflakes.exclude similarity index 100% rename from config/tests/__init__.py rename to ci/pyflakes.exclude diff --git a/examples/__init__.py b/ci/pyflakes.ignore similarity index 100% rename from examples/__init__.py rename to ci/pyflakes.ignore diff --git a/ci/run-pyflakes b/ci/run-pyflakes new file mode 100755 index 00000000..02fa1c6e --- /dev/null +++ b/ci/run-pyflakes @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# +# Utility script to run pyflakes with the modules we care about and +# exclude errors we know to be fine. 
+ +import os +import re +import subprocess +import sys + + +def main(): + + modules = sys.argv[1:] + + if not modules: + modules = ['dps'] + + p = subprocess.Popen(['pyflakes'] + modules, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + close_fds=True) + + contents = p.stdout.readlines() + + # Read in the ignore file + ignore = [] + fp = open("ci/pyflakes.ignore", "r") + + for line in fp.readlines(): + ignore.append(line.rstrip()) + + fp.close() + + # Read in the exclusions file + exclusions = {} + fp = open("ci/pyflakes.exclude", "r") + + for line in fp.readlines(): + exclusions[line.rstrip()] = 1 + + fp.close() + + # Now filter things + for line in contents: + line = line.rstrip() + skip = False + for ignore_line in ignore: + if line.startswith(ignore_line): + skip = True + break + if skip: + continue + test_line = re.sub(r':[0-9]+:', r':*:', line, 1) + test_line = re.sub(r'line [0-9]+', r'line *', test_line) + + if test_line not in exclusions: + print(line) + +if __name__ == "__main__": + main() diff --git a/ci/test.sh b/ci/test.sh index 9b62472c..cdc33988 100755 --- a/ci/test.sh +++ b/ci/test.sh @@ -32,4 +32,4 @@ sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm #- sudo chmod a+w /dev/shm ls -la /dev/shm -time nosetests -v +time make test diff --git a/config/__init__.py b/config/__init__.py deleted file mode 100644 index 8f661adf..00000000 --- a/config/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from __future__ import absolute_import - -from .cross_section_config import XSectionConfig - -__all__ = [ - 'XSectionConfig', -] diff --git a/config/histogramColours.py b/config/histogramColours.py deleted file mode 100644 index 6a767c55..00000000 --- a/config/histogramColours.py +++ /dev/null @@ -1,7 +0,0 @@ -histogram_colours = { - 'data' : 'black', - 'QCD' : 290, # kYellow-10 - 'V+Jets' : 413, # kGreen-3 - 'Single-Top' : 616, # kMagenta - 'TTJet' : 633, # kRed+1 -} \ No newline at end of file diff --git a/config/tests/test_XSectionConfig.py 
b/config/tests/test_XSectionConfig.py deleted file mode 100644 index c646fc93..00000000 --- a/config/tests/test_XSectionConfig.py +++ /dev/null @@ -1,56 +0,0 @@ -''' -Created on 14 May 2014 - -@author: kreczko -''' -import unittest -from config import XSectionConfig -from __builtin__ import getattr - - -class Test(unittest.TestCase): - - def setUp(self): - self.config_7TeV = XSectionConfig(centre_of_mass_energy=7) - self.config_8TeV = XSectionConfig(centre_of_mass_energy=8) - self.config_13TeV = XSectionConfig(centre_of_mass_energy=13) - - def test_current_analysis_path(self): - self.assertTrue(XSectionConfig.current_analysis_path.endswith('/')) - - def test_paths(self): - self.assertTrue(XSectionConfig.current_analysis_path.endswith('/')) - self.assertTrue(self.config_7TeV.path_to_files.endswith('/')) - self.assertTrue( - self.config_7TeV.path_to_unfolding_histograms.endswith('/')) - self.assertTrue(self.config_8TeV.path_to_files.endswith('/')) - - self.assertTrue('7TeV' in self.config_7TeV.path_to_files) - self.assertTrue('8TeV' in self.config_8TeV.path_to_files) - self.assertTrue('13TeV' in self.config_13TeV.path_to_files) - - unfolding_files = ['unfolding_powheg_pythia_raw', 'unfolding_powheg_herwig_raw', - 'unfolding_mcatnlo_raw', 'unfolding_scale_down_raw', - 'unfolding_scale_up_raw', 'unfolding_matching_down_raw', - 'unfolding_matching_up_raw', ] - for u_file in unfolding_files: - full_path = getattr(self.config_7TeV, u_file) - self.assertEqual(full_path.count('7TeV'), 2) - full_path = getattr(self.config_8TeV, u_file) - self.assertEqual(full_path.count('8TeV'), 2) - - def test_invalid_centre_of_mass_energy(self): - self.assertRaises(AttributeError, XSectionConfig, (1232)) - - def test_luminosity(self): - self.assertEqual(self.config_7TeV.luminosity, 5050) - self.assertEqual(self.config_8TeV.luminosity, 19584) - - def test_parameters(self): - for param in XSectionConfig.parameters: - self.assertTrue( - hasattr(self.config_7TeV, param), 'Parameter ' + param + 
' not found.') - - -if __name__ == "__main__": - unittest.main() diff --git a/dps/__init__.py b/dps/__init__.py new file mode 100644 index 00000000..030b6316 --- /dev/null +++ b/dps/__init__.py @@ -0,0 +1,2 @@ +from __future__ import absolute_import +__version__ = '2.1.0' diff --git a/src/BLTUnfold/PDFVariationPlotter.py b/dps/analysis/BLTUnfold/PDFVariationPlotter.py similarity index 81% rename from src/BLTUnfold/PDFVariationPlotter.py rename to dps/analysis/BLTUnfold/PDFVariationPlotter.py index f446cfe1..d1b41831 100644 --- a/src/BLTUnfold/PDFVariationPlotter.py +++ b/dps/analysis/BLTUnfold/PDFVariationPlotter.py @@ -1,12 +1,7 @@ -from config.histogram_colours import histogram_colours as colours -from config import XSectionConfig -from rootpy.plotting import Hist -from tools.ROOT_utils import get_histograms_from_trees, set_root_defaults -from tools.latex import setup_matplotlib -from uncertainties import ufloat -from math import sqrt -from copy import deepcopy +from dps.config.xsection import XSectionConfig +from dps.utils.ROOT_utils import set_root_defaults +from dps.utils.latex import setup_matplotlib from rootpy.io import File from rootpy import asrootpy from ROOT import TCanvas, kRed diff --git a/experimental/__init__.py b/dps/analysis/BLTUnfold/__init__.py similarity index 100% rename from experimental/__init__.py rename to dps/analysis/BLTUnfold/__init__.py diff --git a/src/BLTUnfold/getOutput.py b/dps/analysis/BLTUnfold/getOutput.py similarity index 100% rename from src/BLTUnfold/getOutput.py rename to dps/analysis/BLTUnfold/getOutput.py diff --git a/src/BLTUnfold/getScaleFactors.py b/dps/analysis/BLTUnfold/getScaleFactors.py similarity index 100% rename from src/BLTUnfold/getScaleFactors.py rename to dps/analysis/BLTUnfold/getScaleFactors.py diff --git a/src/BLTUnfold/produceUnfoldingHistograms.py b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py similarity index 98% rename from src/BLTUnfold/produceUnfoldingHistograms.py rename to 
dps/analysis/BLTUnfold/produceUnfoldingHistograms.py index 5b18cb0b..189f201f 100644 --- a/src/BLTUnfold/produceUnfoldingHistograms.py +++ b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py @@ -1,12 +1,11 @@ -from rootpy.tree import Tree -from rootpy.plotting import Hist, Hist2D, Canvas -from rootpy.io import root_open, File +from rootpy.plotting import Hist, Hist2D +from rootpy.io import root_open #from rootpy.interactive import wait from optparse import OptionParser -from config import XSectionConfig -from config.variable_binning import bin_edges_vis, reco_bin_edges_vis -from config.variableBranchNames import branchNames, genBranchNames_particle, genBranchNames_parton -from tools.file_utilities import make_folder_if_not_exists +from dps.config.xsection import XSectionConfig +from dps.config.variable_binning import bin_edges_vis, reco_bin_edges_vis +from dps.config.variableBranchNames import branchNames, genBranchNames_particle, genBranchNames_parton +from dps.utils.file_utilities import make_folder_if_not_exists from math import trunc from scaleFactors import * diff --git a/src/BLTUnfold/run.sh b/dps/analysis/BLTUnfold/run.sh similarity index 100% rename from src/BLTUnfold/run.sh rename to dps/analysis/BLTUnfold/run.sh diff --git a/src/BLTUnfold/runCondor.sh b/dps/analysis/BLTUnfold/runCondor.sh similarity index 78% rename from src/BLTUnfold/runCondor.sh rename to dps/analysis/BLTUnfold/runCondor.sh index abb92b5c..050cd6ec 100755 --- a/src/BLTUnfold/runCondor.sh +++ b/dps/analysis/BLTUnfold/runCondor.sh @@ -15,21 +15,21 @@ export PYTHONPATH=$PYTHONPATH:`pwd` echo "DailyPythonScripts are set up" -ls -l ${DPSROOT}/src/BLTUnfold/runJobsCrab.py -chmod a+x ${DPSROOT}/src/BLTUnfold/runJobsCrab.py -jobArguments=`${DPSROOT}/src/BLTUnfold/runJobsCrab.py --return_job_options -j $1` +ls -l ${DPSROOT}/dps/analysis/BLTUnfold/runJobsCrab.py +chmod a+x ${DPSROOT}/dps/analysis/BLTUnfold/runJobsCrab.py +jobArguments=`${DPSROOT}/dps/analysis/BLTUnfold/runJobsCrab.py 
--return_job_options -j $1` echo "Job arguments "$jobArguments if [[ $jobArguments == *"generatorWeight"* ]] then echo "Will copy input file locally" - hadoop fs -copyToLocal /TopQuarkGroup/run2/atOutput/13TeV/25ns/TTJets_PowhegPythia8_tree.root ${DPSROOT}/localInputFile.root + hadoop fs -copyToLocal /TopQuarkGroup/run2/atOutput/13TeV/2016/TTJets_PowhegPythia8_tree.root ${DPSROOT}/localInputFile.root fi echo "Running payload" >&2 echo "Running payload" mkdir -p unfolding/13TeV echo "Running script" -time python ${DPSROOT}/src/BLTUnfold/runJobsCrab.py -j $1 +time python ${DPSROOT}/dps/analysis/BLTUnfold/runJobsCrab.py -j $1 echo "Unfolding folder contents:" ls -l unfolding diff --git a/src/BLTUnfold/runJobsCrab.py b/dps/analysis/BLTUnfold/runJobsCrab.py similarity index 97% rename from src/BLTUnfold/runJobsCrab.py rename to dps/analysis/BLTUnfold/runJobsCrab.py index 30f870b3..0a92428e 100755 --- a/src/BLTUnfold/runJobsCrab.py +++ b/dps/analysis/BLTUnfold/runJobsCrab.py @@ -100,7 +100,7 @@ def main(options, args = []): parser = OptionParser() print 'Running job :',jobs[options.jobNumber-1] - os.system('python src/BLTUnfold/produceUnfoldingHistograms.py %s ' % jobs[options.jobNumber-1] ) + os.system('python dps/analysis/BLTUnfold/produceUnfoldingHistograms.py %s ' % jobs[options.jobNumber-1] ) if __name__ == '__main__': options, args = parse_args() diff --git a/src/BLTUnfold/runJobsInteractive.py b/dps/analysis/BLTUnfold/runJobsInteractive.py similarity index 100% rename from src/BLTUnfold/runJobsInteractive.py rename to dps/analysis/BLTUnfold/runJobsInteractive.py diff --git a/src/BLTUnfold/runNew.sh b/dps/analysis/BLTUnfold/runNew.sh similarity index 100% rename from src/BLTUnfold/runNew.sh rename to dps/analysis/BLTUnfold/runNew.sh diff --git a/src/BLTUnfold/scaleFactors.py b/dps/analysis/BLTUnfold/scaleFactors.py similarity index 100% rename from src/BLTUnfold/scaleFactors.py rename to dps/analysis/BLTUnfold/scaleFactors.py diff --git 
a/src/BLTUnfold/submitBLTUnfold.description b/dps/analysis/BLTUnfold/submitBLTUnfold.description similarity index 88% rename from src/BLTUnfold/submitBLTUnfold.description rename to dps/analysis/BLTUnfold/submitBLTUnfold.description index c2540238..5802cc9c 100644 --- a/src/BLTUnfold/submitBLTUnfold.description +++ b/dps/analysis/BLTUnfold/submitBLTUnfold.description @@ -1,4 +1,4 @@ -Executable = src/BLTUnfold/runCondor.sh +Executable = dps/analysis/BLTUnfold/runCondor.sh Universe = vanilla Output = bltUnfold.job.$(cluster).$(process).out Error = bltUnfold.job.$(cluster).$(process).err diff --git a/legacy/__init__.py b/dps/analysis/HLT_scripts_for_Sergeys_thesis/__init__.py similarity index 100% rename from legacy/__init__.py rename to dps/analysis/HLT_scripts_for_Sergeys_thesis/__init__.py diff --git a/src/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py b/dps/analysis/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py similarity index 99% rename from src/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py rename to dps/analysis/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py index 33509012..b225ef2c 100644 --- a/src/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py +++ b/dps/analysis/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py @@ -13,8 +13,8 @@ # import config.summations as summations from ROOT import TGraphAsymmErrors, TF1, TLegend, TLatex from array import array -from config import CMS -from tools.ROOT_utils import set_root_defaults +from dps.config import CMS +from dps.utils.ROOT_utils import set_root_defaults import matplotlib.cm as cm from matplotlib.ticker import FormatStrFormatter diff --git a/src/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py b/dps/analysis/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py similarity index 99% rename from src/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py rename to 
dps/analysis/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py index eadcd888..81de15d3 100644 --- a/src/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py +++ b/dps/analysis/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py @@ -13,8 +13,8 @@ # import config.summations as summations from ROOT import TGraphAsymmErrors, TF1 from array import array -from config import CMS -from tools.ROOT_utils import set_root_defaults +from dps.config import CMS +from dps.utils.ROOT_utils import set_root_defaults import matplotlib.cm as cm from matplotlib.ticker import FormatStrFormatter diff --git a/src/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py b/dps/analysis/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py similarity index 99% rename from src/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py rename to dps/analysis/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py index 36d79fd2..61e0a728 100644 --- a/src/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py +++ b/dps/analysis/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py @@ -13,8 +13,8 @@ # import config.summations as summations from ROOT import TGraphAsymmErrors, TF1 from array import array -from config import CMS -from tools.ROOT_utils import set_root_defaults +from dps.config import CMS +from dps.utils.ROOT_utils import set_root_defaults import matplotlib.cm as cm from matplotlib.ticker import FormatStrFormatter diff --git a/legacy/config/__init__.py b/dps/analysis/__init__.py similarity index 100% rename from legacy/config/__init__.py rename to dps/analysis/__init__.py diff --git a/src/check_CRAB_jobs.py b/dps/analysis/check_CRAB_jobs.py similarity index 98% rename from src/check_CRAB_jobs.py rename to dps/analysis/check_CRAB_jobs.py index 1ee8ed5e..54c3fb3d 100644 --- a/src/check_CRAB_jobs.py +++ 
b/dps/analysis/check_CRAB_jobs.py @@ -1,7 +1,7 @@ import os as os import sys as sys import re as re -from tools.ROOT_utils import set_root_defaults +from dps.utils.ROOT_utils import set_root_defaults from optparse import OptionParser import glob as glob from rootpy.io import File diff --git a/src/estimate_QCD_rate.py b/dps/analysis/estimate_QCD_rate.py similarity index 92% rename from src/estimate_QCD_rate.py rename to dps/analysis/estimate_QCD_rate.py index 95ad5a31..fe2d3852 100644 --- a/src/estimate_QCD_rate.py +++ b/dps/analysis/estimate_QCD_rate.py @@ -9,7 +9,7 @@ In addition to above it should provide an easy way to present the binned estimates (MET, b-tag, other bins) ''' -from tools.QCD_rate_estimation import estimate_with_fit_to_relative_isolation +from dps.utils.QCD_rate_estimation import estimate_with_fit_to_relative_isolation from rootpy.io import File path_to_files = '/storage/TopQuarkGroup/results/histogramfiles/AN-12-241_V4/' diff --git a/src/generate_QCD_template_from_data.py b/dps/analysis/generate_QCD_template_from_data.py similarity index 97% rename from src/generate_QCD_template_from_data.py rename to dps/analysis/generate_QCD_template_from_data.py index 9e4af609..6ea1efb7 100644 --- a/src/generate_QCD_template_from_data.py +++ b/dps/analysis/generate_QCD_template_from_data.py @@ -16,8 +16,8 @@ ''' from __future__ import division from rootpy.io import File -from tools.QCD_rate_estimation import estimate_with_fit_to_relative_isolation -from tools.ROOT_utils import set_root_defaults +from dps.utils.QCD_rate_estimation import estimate_with_fit_to_relative_isolation +from dps.utils.ROOT_utils import set_root_defaults # one template function per variable def get_electron_absolute_eta_templates(b_tag): global electron_data_file, met_bins diff --git a/src/grid.py b/dps/analysis/grid.py similarity index 87% rename from src/grid.py rename to dps/analysis/grid.py index 0602c8e9..df0feb5b 100644 --- a/src/grid.py +++ b/dps/analysis/grid.py @@ -4,8 +4,6 @@ 
@author: kreczko ''' from optparse import OptionParser -#import grid utilities -from tools.grid_utilities import fetch_grid_file, delete_grid_folder, remote_copy_folder def rm(filename, recursive = False): pass diff --git a/legacy/data/__init__.py b/dps/analysis/lepton_scale_factors/__init__.py similarity index 100% rename from legacy/data/__init__.py rename to dps/analysis/lepton_scale_factors/__init__.py diff --git a/src/lepton_scale_factors/measure_2011_hadron_leg.py b/dps/analysis/lepton_scale_factors/measure_2011_hadron_leg.py similarity index 97% rename from src/lepton_scale_factors/measure_2011_hadron_leg.py rename to dps/analysis/lepton_scale_factors/measure_2011_hadron_leg.py index 43234c37..1b87f80e 100644 --- a/src/lepton_scale_factors/measure_2011_hadron_leg.py +++ b/dps/analysis/lepton_scale_factors/measure_2011_hadron_leg.py @@ -1,8 +1,8 @@ from optparse import OptionParser -from read_BLT_ntuple import Particle, read_lepton_collections, match_four_momenta, get_parameters, set_parameter_limits, get_fitted_function_str +from .read_BLT_ntuple import Particle, read_lepton_collections, match_four_momenta, get_parameters, set_parameter_limits, get_fitted_function_str from ROOT import TGraphAsymmErrors, TF1 -from tools.ROOT_utils import set_root_defaults -from tools.file_utilities import make_folder_if_not_exists +from dps.utils.ROOT_utils import set_root_defaults +from dps.utils.file_utilities import make_folder_if_not_exists import rootpy.plotting.root2matplotlib as rplt import matplotlib.pyplot as plt from matplotlib.ticker import FormatStrFormatter @@ -16,7 +16,7 @@ from numpy import frompyfunc from pylab import plot -from config import CMS +from dps.config import CMS from matplotlib import rc rc( 'font', **CMS.font ) rc( 'text', usetex = True ) diff --git a/src/lepton_scale_factors/read_BLT_ntuple.py b/dps/analysis/lepton_scale_factors/read_BLT_ntuple.py similarity index 99% rename from src/lepton_scale_factors/read_BLT_ntuple.py rename to 
dps/analysis/lepton_scale_factors/read_BLT_ntuple.py index 8663ad39..4d7f5f70 100644 --- a/src/lepton_scale_factors/read_BLT_ntuple.py +++ b/dps/analysis/lepton_scale_factors/read_BLT_ntuple.py @@ -7,7 +7,7 @@ tag and probe studies to estimate single lepton trigger efficiency. ''' -from config import CMS +from dps.config import CMS from rootpy.io import File from rootpy import asrootpy, ROOTError from optparse import OptionParser @@ -23,10 +23,9 @@ import matplotlib.gridspec as gridspec from matplotlib.ticker import FormatStrFormatter, MultipleLocator from rootpy.plotting import Hist, Hist2D, Canvas, Efficiency -from tools.ROOT_utils import set_root_defaults -from tools.file_utilities import make_folder_if_not_exists -from tools.hist_utilities import hist_to_value_error_tuplelist -from tools.plotting import make_plot, Histogram_properties +from dps.utils.ROOT_utils import set_root_defaults +from dps.utils.file_utilities import make_folder_if_not_exists +from dps.utils.plotting import make_plot, Histogram_properties from ROOT import TLorentzVector, TGraphAsymmErrors, TF1, gPad, gStyle from ROOT import RooFit, RooDataHist, RooArgList, RooAddPdf, RooRealVar, RooBreitWigner, RooExponential, RooFFTConvPdf, RooCBShape diff --git a/src/make_CRAB_configuration.py b/dps/analysis/make_CRAB_configuration.py similarity index 100% rename from src/make_CRAB_configuration.py rename to dps/analysis/make_CRAB_configuration.py diff --git a/src/make_HLT_plots.py b/dps/analysis/make_HLT_plots.py similarity index 99% rename from src/make_HLT_plots.py rename to dps/analysis/make_HLT_plots.py index a3a1332a..3604c999 100644 --- a/src/make_HLT_plots.py +++ b/dps/analysis/make_HLT_plots.py @@ -5,11 +5,11 @@ import rootpy.plotting.root2matplotlib as rplt import matplotlib.pyplot as plt # from matplotlib.ticker import AutoMinorLocator -# import config.summations as summations +# import dps.config.summations as summations from ROOT import TEfficiency, TGraphAsymmErrors, TF1, TLegend, TLatex 
from array import array -from config import CMS -from tools.ROOT_utils import set_root_defaults +from dps.config import CMS +from dps.utils.ROOT_utils import set_root_defaults import matplotlib.gridspec as gridspec from matplotlib.ticker import MultipleLocator, FormatStrFormatter diff --git a/src/make_ntuples_CRAB_configurations.sh b/dps/analysis/make_ntuples_CRAB_configurations.sh similarity index 100% rename from src/make_ntuples_CRAB_configurations.sh rename to dps/analysis/make_ntuples_CRAB_configurations.sh diff --git a/src/make_unfolding_CRAB_configurations.sh b/dps/analysis/make_unfolding_CRAB_configurations.sh similarity index 100% rename from src/make_unfolding_CRAB_configurations.sh rename to dps/analysis/make_unfolding_CRAB_configurations.sh diff --git a/src/read_processed_events.py b/dps/analysis/read_processed_events.py similarity index 100% rename from src/read_processed_events.py rename to dps/analysis/read_processed_events.py diff --git a/src/read_skim_information.py b/dps/analysis/read_skim_information.py similarity index 100% rename from src/read_skim_information.py rename to dps/analysis/read_skim_information.py diff --git a/legacy/dev/__init__.py b/dps/analysis/search/__init__.py similarity index 100% rename from legacy/dev/__init__.py rename to dps/analysis/search/__init__.py diff --git a/src/search/test.root b/dps/analysis/search/test.root similarity index 100% rename from src/search/test.root rename to dps/analysis/search/test.root diff --git a/src/search/translate_results_to_theta.py b/dps/analysis/search/translate_results_to_theta.py similarity index 98% rename from src/search/translate_results_to_theta.py rename to dps/analysis/search/translate_results_to_theta.py index f1bd44e3..1975c1d8 100644 --- a/src/search/translate_results_to_theta.py +++ b/dps/analysis/search/translate_results_to_theta.py @@ -26,9 +26,9 @@ from math import sqrt from rootpy.io import root_open -from tools.file_utilities import read_data_from_JSON -from 
tools.hist_utilities import value_error_tuplelist_to_hist -from config import XSectionConfig +from dps.utils.file_utilities import read_data_from_JSON +from dps.utils.hist_utilities import value_error_tuplelist_to_hist +from dps.config.xsection import XSectionConfig def get_variable_from(variable='MET', path_to_JSON='data/8TeV', category='central', signal='Higgs', measurement_type='unfolded'): global met_type diff --git a/src/search/validate_systematic_method.py b/dps/analysis/search/validate_systematic_method.py similarity index 96% rename from src/search/validate_systematic_method.py rename to dps/analysis/search/validate_systematic_method.py index c8c7f0d2..7c21676c 100644 --- a/src/search/validate_systematic_method.py +++ b/dps/analysis/search/validate_systematic_method.py @@ -2,8 +2,8 @@ import rootpy.plotting.root2matplotlib as rplt import matplotlib.pyplot as plt from matplotlib.ticker import AutoMinorLocator -from config import CMS -from tools.hist_utilities import value_error_tuplelist_to_hist, hist_to_value_error_tuplelist +from dps.config import CMS +from dps.utils.hist_utilities import value_error_tuplelist_to_hist, hist_to_value_error_tuplelist def normalise(histograms): for histogram in histograms: diff --git a/src/trash/README b/dps/analysis/trash/README similarity index 100% rename from src/trash/README rename to dps/analysis/trash/README diff --git a/src/unfolding_tests/README.md b/dps/analysis/unfolding_tests/README.md similarity index 100% rename from src/unfolding_tests/README.md rename to dps/analysis/unfolding_tests/README.md diff --git a/legacy/tools/__init__.py b/dps/analysis/unfolding_tests/__init__.py similarity index 100% rename from legacy/tools/__init__.py rename to dps/analysis/unfolding_tests/__init__.py diff --git a/src/unfolding_tests/analyse_unfolding_pulls.py b/dps/analysis/unfolding_tests/analyse_unfolding_pulls.py similarity index 95% rename from src/unfolding_tests/analyse_unfolding_pulls.py rename to 
dps/analysis/unfolding_tests/analyse_unfolding_pulls.py index 2eaad86d..26d908ea 100644 --- a/src/unfolding_tests/analyse_unfolding_pulls.py +++ b/dps/analysis/unfolding_tests/analyse_unfolding_pulls.py @@ -13,12 +13,11 @@ from rootpy import asrootpy import rootpy.plotting.root2matplotlib as rplt # DPS includes -from src.unfolding_tests.make_unfolding_pull_plots import get_data, \ - plot_pull -from config import XSectionConfig, CMS -from config.variable_binning import bin_edges_full -from tools.ROOT_utils import set_root_defaults -from tools.file_utilities import make_folder_if_not_exists +from .make_unfolding_pull_plots import get_data, plot_pull +from dps.config import CMS +from dps.config.variable_binning import bin_edges_full +from dps.utils.ROOT_utils import set_root_defaults +from dps.utils.file_utilities import make_folder_if_not_exists def main(): diff --git a/src/unfolding_tests/bias_test.py b/dps/analysis/unfolding_tests/bias_test.py similarity index 92% rename from src/unfolding_tests/bias_test.py rename to dps/analysis/unfolding_tests/bias_test.py index 5a0369b5..8fe5446e 100644 --- a/src/unfolding_tests/bias_test.py +++ b/dps/analysis/unfolding_tests/bias_test.py @@ -4,11 +4,10 @@ ''' from rootpy.io import File -from config.variable_binning import bin_edges_vis -from tools.Unfolding import Unfolding, get_unfold_histogram_tuple -from config.cross_section_config import XSectionConfig -from tools.plotting import compare_measurements, Histogram_properties -from config import latex_labels +from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple +from dps.config.xsection import XSectionConfig +from dps.utils.plotting import compare_measurements, Histogram_properties +from dps.config import latex_labels def main(): diff --git a/src/unfolding_tests/closure_test.py b/dps/analysis/unfolding_tests/closure_test.py similarity index 93% rename from src/unfolding_tests/closure_test.py rename to dps/analysis/unfolding_tests/closure_test.py index 
c62660e1..ae2c2645 100644 --- a/src/unfolding_tests/closure_test.py +++ b/dps/analysis/unfolding_tests/closure_test.py @@ -4,17 +4,16 @@ ''' from rootpy.io import File -from config.variable_binning import bin_edges_vis, bin_widths_visiblePS -from tools.Unfolding import Unfolding, get_unfold_histogram_tuple -from tools.hist_utilities import hist_to_value_error_tuplelist, value_error_tuplelist_to_hist -from tools.Calculation import calculate_normalised_xsection -from config.cross_section_config import XSectionConfig -from tools.plotting import compare_measurements, Histogram_properties -from config import latex_labels +from dps.config.variable_binning import bin_edges_vis, bin_widths_visiblePS +from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple +from dps.utils.hist_utilities import hist_to_value_error_tuplelist, value_error_tuplelist_to_hist +from dps.utils.Calculation import calculate_normalised_xsection +from dps.config.xsection import XSectionConfig +from dps.utils.plotting import compare_measurements, Histogram_properties +from dps.config import latex_labels from rootpy import asrootpy from collections import OrderedDict -from tools.latex import setup_matplotlib -from math import sqrt +from dps.utils.latex import setup_matplotlib # latex, font, etc setup_matplotlib() def main(): diff --git a/src/unfolding_tests/compare_reweighting.py b/dps/analysis/unfolding_tests/compare_reweighting.py similarity index 92% rename from src/unfolding_tests/compare_reweighting.py rename to dps/analysis/unfolding_tests/compare_reweighting.py index e4dd7bfc..94b10812 100644 --- a/src/unfolding_tests/compare_reweighting.py +++ b/dps/analysis/unfolding_tests/compare_reweighting.py @@ -1,13 +1,13 @@ -from tools.file_utilities import read_data_from_JSON -from tools.Unfolding import get_unfold_histogram_tuple, removeFakes +from dps.utils.file_utilities import read_data_from_JSON +from dps.utils.Unfolding import get_unfold_histogram_tuple, removeFakes from rootpy.io 
import File from rootpy import asrootpy -from tools.hist_utilities import value_error_tuplelist_to_hist -from config.cross_section_config import XSectionConfig -from config.variable_binning import reco_bin_edges_vis -from tools.plotting import compare_measurements, Histogram_properties -from config import latex_labels +from dps.utils.hist_utilities import value_error_tuplelist_to_hist +from dps.config.xsection import XSectionConfig +from dps.config.variable_binning import reco_bin_edges_vis +from dps.utils.plotting import compare_measurements, Histogram_properties +from dps.config import latex_labels diff --git a/src/unfolding_tests/compare_unfolding_parameters.py b/dps/analysis/unfolding_tests/compare_unfolding_parameters.py similarity index 94% rename from src/unfolding_tests/compare_unfolding_parameters.py rename to dps/analysis/unfolding_tests/compare_unfolding_parameters.py index f7c5d278..c3730165 100644 --- a/src/unfolding_tests/compare_unfolding_parameters.py +++ b/dps/analysis/unfolding_tests/compare_unfolding_parameters.py @@ -16,18 +16,18 @@ from optparse import OptionParser from copy import deepcopy from ROOT import TH1F -from tools.ROOT_utils import set_root_defaults +from dps.utils.ROOT_utils import set_root_defaults import collections from rootpy.io import File -from config import latex_labels, XSectionConfig -from config.variable_binning import bin_widths, bin_edges_vis -from tools.plotting import Histogram_properties, compare_measurements -from tools.Unfolding import get_unfold_histogram_tuple, Unfolding -from tools.Calculation import calculate_normalised_xsection -from tools.hist_utilities import hist_to_value_error_tuplelist, get_fit_results_histogram -from tools.hist_utilities import value_error_tuplelist_to_hist, spread_x -from tools.file_utilities import make_folder_if_not_exists +from dps.config import latex_labels +from dps.config.xsection import XSectionConfig +from dps.config.variable_binning import bin_edges_vis +from dps.utils.plotting 
import Histogram_properties, compare_measurements +from dps.utils.Unfolding import get_unfold_histogram_tuple, Unfolding +from dps.utils.hist_utilities import get_fit_results_histogram +from dps.utils.hist_utilities import spread_x +from dps.utils.file_utilities import make_folder_if_not_exists def get_test_k_values( h_truth, h_measured, h_response, h_data = None ): """ diff --git a/src/unfolding_tests/create_toy_mc.py b/dps/analysis/unfolding_tests/create_toy_mc.py similarity index 92% rename from src/unfolding_tests/create_toy_mc.py rename to dps/analysis/unfolding_tests/create_toy_mc.py index 5dff5d73..8a12c7dc 100644 --- a/src/unfolding_tests/create_toy_mc.py +++ b/dps/analysis/unfolding_tests/create_toy_mc.py @@ -14,10 +14,10 @@ ''' from optparse import OptionParser from rootpy.io import File -from config import XSectionConfig -from tools.ROOT_utils import set_root_defaults +from dps.config.xsection import XSectionConfig +from dps.utils.ROOT_utils import set_root_defaults from rootpy.io.file import root_open -from config.variable_binning import bin_edges_vis +from dps.config.variable_binning import bin_edges_vis from rootpy import asrootpy @@ -63,9 +63,9 @@ def main(): def create_toy_mc(input_file, sample, output_folder, n_toy, centre_of_mass, ttbar_xsection): - from tools.file_utilities import make_folder_if_not_exists - from tools.toy_mc import generate_toy_MC_from_distribution, generate_toy_MC_from_2Ddistribution - from tools.Unfolding import get_unfold_histogram_tuple + from dps.utils.file_utilities import make_folder_if_not_exists + from dps.utils.toy_mc import generate_toy_MC_from_distribution, generate_toy_MC_from_2Ddistribution + from dps.utils.Unfolding import get_unfold_histogram_tuple make_folder_if_not_exists(output_folder) input_file_hists = File(input_file) output_file_name = get_output_file_name(output_folder, sample, n_toy, centre_of_mass) diff --git a/src/unfolding_tests/create_toy_mc_from_tree.py 
b/dps/analysis/unfolding_tests/create_toy_mc_from_tree.py similarity index 97% rename from src/unfolding_tests/create_toy_mc_from_tree.py rename to dps/analysis/unfolding_tests/create_toy_mc_from_tree.py index 253a0dcc..6fc3d066 100644 --- a/src/unfolding_tests/create_toy_mc_from_tree.py +++ b/dps/analysis/unfolding_tests/create_toy_mc_from_tree.py @@ -7,18 +7,18 @@ import sys import os -import math from optparse import OptionParser -from config.cross_section_config import XSectionConfig +from dps.config.xsection import XSectionConfig from rootpy.io.file import root_open -from config.variable_binning import bin_edges, bin_edges_vis +# @BROKEN +from dps.config.variable_binning import bin_edges, bin_edges_vis import numpy as np -from tools.ROOT_utils import set_root_defaults +from dps.utils.ROOT_utils import set_root_defaults from rootpy.plotting.hist import Hist, Hist2D -from config.variableBranchNames import branchNames, genBranchNames_particle,\ +from dps.config.variableBranchNames import branchNames, genBranchNames_particle,\ genBranchNames_parton -from tools.file_utilities import make_folder_if_not_exists +from dps.utils.file_utilities import make_folder_if_not_exists __all__ = [] __version__ = 0.1 __date__ = '2015-08-03' @@ -354,7 +354,7 @@ def get_output_file_name(output_folder, n_toy, start_at, n_input_mc, centre_of_m import cProfile import pstats from guppy import hpy - profile_filename = 'src.unfolding_tests.create_toy_mc_from_tree_profile.txt' + profile_filename = 'dps.analysis.unfolding_tests.create_toy_mc_from_tree_profile.txt' cProfile.run('main()', profile_filename) statsfile = open("profile_stats.txt", "wb") p = pstats.Stats(profile_filename, stream=statsfile) diff --git a/src/unfolding_tests/create_unfolding_pull_data.py b/dps/analysis/unfolding_tests/create_unfolding_pull_data.py similarity index 96% rename from src/unfolding_tests/create_unfolding_pull_data.py rename to dps/analysis/unfolding_tests/create_unfolding_pull_data.py index 
ab3cec3f..52085550 100644 --- a/src/unfolding_tests/create_unfolding_pull_data.py +++ b/dps/analysis/unfolding_tests/create_unfolding_pull_data.py @@ -7,15 +7,14 @@ from optparse import OptionParser from rootpy.io import File from rootpy import asrootpy -from tools.Unfolding import Unfolding -from tools.hist_utilities import hist_to_value_error_tuplelist -from tools.file_utilities import write_data_to_JSON, make_folder_if_not_exists -from tools.Timer import Timer -from math import sqrt, log10 +from dps.utils.Unfolding import Unfolding +from dps.utils.hist_utilities import hist_to_value_error_tuplelist +from dps.utils.file_utilities import write_data_to_JSON, make_folder_if_not_exists +from dps.utils.Timer import Timer from time import time -from tools.ROOT_utils import set_root_defaults +from dps.utils.ROOT_utils import set_root_defaults -from config import XSectionConfig +from dps.config.xsection import XSectionConfig def main(): diff --git a/src/unfolding_tests/getBestTau.py b/dps/analysis/unfolding_tests/getBestTau.py similarity index 100% rename from src/unfolding_tests/getBestTau.py rename to dps/analysis/unfolding_tests/getBestTau.py diff --git a/src/unfolding_tests/get_best_regularisation.py b/dps/analysis/unfolding_tests/get_best_regularisation.py similarity index 96% rename from src/unfolding_tests/get_best_regularisation.py rename to dps/analysis/unfolding_tests/get_best_regularisation.py index fea06067..0a0e60b4 100644 --- a/src/unfolding_tests/get_best_regularisation.py +++ b/dps/analysis/unfolding_tests/get_best_regularisation.py @@ -29,19 +29,18 @@ # rootpy from rootpy.io import File # DailyPythonScripts -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.Unfolding import Unfolding, get_unfold_histogram_tuple -#from src.cross_section_measurement.lib import get_unfold_histogram_tuple -from tools.ROOT_utils import set_root_defaults, get_histogram_from_file -from config import XSectionConfig -from 
config.variable_binning import bin_edges_full, bin_edges_vis -from tools.hist_utilities import value_error_tuplelist_to_hist -from tools.table import PrintTable +from dps.utils.file_utilities import read_data_from_JSON, make_folder_if_not_exists +from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple +#from dps.analysis.xsection.lib import get_unfold_histogram_tuple +from dps.utils.ROOT_utils import set_root_defaults, get_histogram_from_file +from dps.config.xsection import XSectionConfig, CMS +from dps.config.variable_binning import bin_edges_full, bin_edges_vis +from dps.utils.hist_utilities import value_error_tuplelist_to_hist +from dps.utils.table import PrintTable import matplotlib.pyplot as plt -from tools.plotting import Histogram_properties +from dps.utils.plotting import Histogram_properties from matplotlib import rc -from config import CMS -from config.latex_labels import variables_latex +from dps.config.latex_labels import variables_latex rc('font',**CMS.font) rc( 'text', usetex = True ) diff --git a/src/unfolding_tests/get_best_regularisation_TUnfold.py b/dps/analysis/unfolding_tests/get_best_regularisation_TUnfold.py similarity index 93% rename from src/unfolding_tests/get_best_regularisation_TUnfold.py rename to dps/analysis/unfolding_tests/get_best_regularisation_TUnfold.py index 68de7546..0e92dc30 100644 --- a/src/unfolding_tests/get_best_regularisation_TUnfold.py +++ b/dps/analysis/unfolding_tests/get_best_regularisation_TUnfold.py @@ -19,28 +19,22 @@ ''' # imports from __future__ import division -from math import log10, pow from optparse import OptionParser import sys # rootpy from rootpy.io import File -from rootpy.plotting import Graph, Canvas -from rootpy.matrix import Matrix +from rootpy.plotting import Graph # DailyPythonScripts -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.Unfolding import Unfolding, get_unfold_histogram_tuple, removeFakes -#from src.cross_section_measurement.lib 
import get_unfold_histogram_tuple -from tools.ROOT_utils import set_root_defaults, get_histogram_from_file -from config import XSectionConfig -from config.variable_binning import reco_bin_edges_vis -from tools.hist_utilities import value_error_tuplelist_to_hist -import matplotlib.pyplot as plt -from tools.plotting import Histogram_properties +from dps.utils.file_utilities import read_data_from_JSON, make_folder_if_not_exists +from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple, removeFakes +#from dps.analysis.xsection.lib import get_unfold_histogram_tuple +from dps.utils.ROOT_utils import set_root_defaults, get_histogram_from_file +from dps.config.xsection import XSectionConfig +from dps.config.variable_binning import reco_bin_edges_vis +from dps.utils.hist_utilities import value_error_tuplelist_to_hist from matplotlib import rc -from config import CMS -from config.latex_labels import variables_latex +from dps.config import CMS from ROOT import TGraph, TSpline3, Double, TUnfoldDensity, TUnfold, TDecompSVD, TMatrixD, TCanvas, gROOT -from numpy.linalg import svd from rootpy import asrootpy rc('font',**CMS.font) diff --git a/src/unfolding_tests/k_value_determination.py b/dps/analysis/unfolding_tests/k_value_determination.py similarity index 97% rename from src/unfolding_tests/k_value_determination.py rename to dps/analysis/unfolding_tests/k_value_determination.py index f7be00c1..fa6c103f 100644 --- a/src/unfolding_tests/k_value_determination.py +++ b/dps/analysis/unfolding_tests/k_value_determination.py @@ -27,13 +27,14 @@ import matplotlib.pyplot as plt from copy import deepcopy -from tools.file_utilities import make_folder_if_not_exists -from tools.hist_utilities import value_error_tuplelist_to_hist, get_fit_results_histogram -from tools.ROOT_utils import set_root_defaults -from tools.Unfolding import Unfolding, get_unfold_histogram_tuple -from config.variable_binning import bin_edges_full -from config import CMS, XSectionConfig -from 
config.latex_labels import variables_latex +from dps.utils.file_utilities import make_folder_if_not_exists +from dps.utils.hist_utilities import get_fit_results_histogram +from dps.utils.ROOT_utils import set_root_defaults +from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple +from dps.config.variable_binning import bin_edges_full +from dps.config import CMS +from dps.config.xsection import XSectionConfig +from dps.config.latex_labels import variables_latex matplotlib.rc('font',**CMS.font) diff --git a/src/unfolding_tests/k_value_optimisation_plots.py b/dps/analysis/unfolding_tests/k_value_optimisation_plots.py similarity index 94% rename from src/unfolding_tests/k_value_optimisation_plots.py rename to dps/analysis/unfolding_tests/k_value_optimisation_plots.py index 6ca814e0..0e34b60c 100644 --- a/src/unfolding_tests/k_value_optimisation_plots.py +++ b/dps/analysis/unfolding_tests/k_value_optimisation_plots.py @@ -14,15 +14,16 @@ from rootpy.io import File import matplotlib from copy import deepcopy -from tools.ROOT_utils import set_root_defaults - -from tools.file_utilities import make_folder_if_not_exists -from tools.hist_utilities import get_fit_results_histogram -from tools.plotting import make_plot, Histogram_properties -from tools.Unfolding import Unfolding, get_unfold_histogram_tuple -from config.variable_binning import bin_edges_vis -from config import CMS, XSectionConfig -from config.latex_labels import variables_latex +from dps.utils.ROOT_utils import set_root_defaults + +from dps.utils.file_utilities import make_folder_if_not_exists +from dps.utils.hist_utilities import get_fit_results_histogram +from dps.utils.plotting import make_plot, Histogram_properties +from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple +from dps.config.variable_binning import bin_edges_vis +from dps.config import CMS +from dps.config.xsection import XSectionConfig +from dps.config.latex_labels import variables_latex matplotlib.use('agg') 
matplotlib.rc('font',**CMS.font) diff --git a/src/unfolding_tests/makeConfig.py b/dps/analysis/unfolding_tests/makeConfig.py similarity index 94% rename from src/unfolding_tests/makeConfig.py rename to dps/analysis/unfolding_tests/makeConfig.py index ea36db07..8f28c057 100644 --- a/src/unfolding_tests/makeConfig.py +++ b/dps/analysis/unfolding_tests/makeConfig.py @@ -1,7 +1,6 @@ import json -from config import XSectionConfig -from config.variable_binning import bin_edges_full, bin_edges_vis -from tools.file_utilities import make_folder_if_not_exists +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import make_folder_if_not_exists com = 13 fitVars = "M3_angle_bl" diff --git a/src/unfolding_tests/make_unfolding_pull_plots.py b/dps/analysis/unfolding_tests/make_unfolding_pull_plots.py similarity index 97% rename from src/unfolding_tests/make_unfolding_pull_plots.py rename to dps/analysis/unfolding_tests/make_unfolding_pull_plots.py index f00d6fee..de46c7ec 100644 --- a/src/unfolding_tests/make_unfolding_pull_plots.py +++ b/dps/analysis/unfolding_tests/make_unfolding_pull_plots.py @@ -16,13 +16,12 @@ python src/unfolding_tests/make_unfolding_pull_plots.py data/pull_data/13TeV/HT/electron/*.txt -s 13 -c electron -o plots/pull_plots/ -v HT ''' from __future__ import division, print_function -from tools.ROOT_utils import set_root_defaults +from dps.utils.ROOT_utils import set_root_defaults from optparse import OptionParser -from glob import glob import sys import matplotlib as mpl -from tools.plotting import Histogram_properties, compare_measurements +from dps.utils.plotting import Histogram_properties, compare_measurements mpl.use('agg') import numpy @@ -36,11 +35,12 @@ from math import sqrt -from config.variable_binning import bin_edges_vis -from config import CMS, latex_labels, XSectionConfig -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.hist_utilities import value_error_tuplelist_to_hist, 
make_line_hist -from tools.latex import setup_matplotlib +from dps.config.variable_binning import bin_edges_vis +from dps.config import CMS, latex_labels +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import read_data_from_JSON, make_folder_if_not_exists +from dps.utils.hist_utilities import value_error_tuplelist_to_hist, make_line_hist +from dps.utils.latex import setup_matplotlib setup_matplotlib() diff --git a/src/unfolding_tests/unfolding_sandbox.py b/dps/analysis/unfolding_tests/unfolding_sandbox.py similarity index 91% rename from src/unfolding_tests/unfolding_sandbox.py rename to dps/analysis/unfolding_tests/unfolding_sandbox.py index d377aacb..7324979a 100644 --- a/src/unfolding_tests/unfolding_sandbox.py +++ b/dps/analysis/unfolding_tests/unfolding_sandbox.py @@ -5,12 +5,8 @@ ''' from rootpy.io import File -from config.variable_binning import bin_edges_vis -from tools.Unfolding import Unfolding, get_unfold_histogram_tuple, removeFakes -from config.cross_section_config import XSectionConfig -from tools.plotting import compare_measurements, Histogram_properties -from config import latex_labels -from tools.ROOT_utils import set_root_defaults +from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple +from dps.config.xsection import XSectionConfig from rootpy import asrootpy def main(): diff --git a/src/cross_section_measurement/00_pick_bins.py b/dps/analysis/xsection/00_pick_bins.py similarity index 97% rename from src/cross_section_measurement/00_pick_bins.py rename to dps/analysis/xsection/00_pick_bins.py index 405c2507..d24549ef 100644 --- a/src/cross_section_measurement/00_pick_bins.py +++ b/dps/analysis/xsection/00_pick_bins.py @@ -42,15 +42,15 @@ from __future__ import print_function from rootpy import asrootpy from rootpy.io import File -from tools.Calculation import calculate_purities, calculate_stabilities -from tools.hist_utilities import rebin_2d -from config import XSectionConfig +from 
dps.utils.Calculation import calculate_purities, calculate_stabilities +from dps.utils.hist_utilities import rebin_2d +from dps.config.xsection import XSectionConfig from optparse import OptionParser -from config.variable_binning import bin_edges_full, minimum_bin_width -from tools.file_utilities import write_data_to_JSON +from dps.config.variable_binning import bin_edges_full, minimum_bin_width +from dps.utils.file_utilities import write_data_to_JSON from ROOT import TH1, TCanvas, TLine, gDirectory, TObjArray, TColor, TLegend -import tools.resolution as rs +import dps.utils.resolution as rs def main(): ''' @@ -267,7 +267,7 @@ def get_best_binning( histogram_information, p_min, s_min, n_min, min_width, x_m def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width ): current_bin_start = bin_start current_bin_end = bin_end - + p, s = 0, 0 for gen_vs_reco_histogram in histograms: reco = asrootpy( gen_vs_reco_histogram.ProjectionX() ) gen = asrootpy( gen_vs_reco_histogram.ProjectionY( 'py', 1 ) ) diff --git a/src/cross_section_measurement/01_get_fit_results.py b/dps/analysis/xsection/01_get_fit_results.py similarity index 98% rename from src/cross_section_measurement/01_get_fit_results.py rename to dps/analysis/xsection/01_get_fit_results.py index 6570c5e7..e011cc65 100644 --- a/src/cross_section_measurement/01_get_fit_results.py +++ b/dps/analysis/xsection/01_get_fit_results.py @@ -5,16 +5,15 @@ # rootpy from rootpy.io import File # DailyPythonScripts -from config.summations_common import b_tag_summations -from config.variable_binning import variable_bins_ROOT, fit_variable_bin_edges -from config import XSectionConfig - -from tools.Calculation import decombine_result, combine_complex_results -from tools.Fitting import Minuit, RooFitFit, FitData, FitDataCollection -from tools.file_utilities import write_data_to_JSON -from tools.ROOT_utils import set_root_defaults, get_histograms_from_trees -from tools.hist_utilities import clean_control_region, 
adjust_overflow_to_limit, get_data_derived_qcd -from lib import closure_tests +from dps.config.summations_common import b_tag_summations +from dps.config.variable_binning import variable_bins_ROOT, fit_variable_bin_edges +from dps.config.xsection import XSectionConfig + +from dps.utils.Calculation import combine_complex_results +from dps.utils.Fitting import Minuit, RooFitFit, FitData, FitDataCollection +from dps.utils.file_utilities import write_data_to_JSON +from dps.utils.ROOT_utils import set_root_defaults, get_histograms_from_trees +from dps.analysis.xsection.lib import closure_tests def get_histograms( channel, input_files, variable, met_systematic, met_type, variable_bin, b_tag_bin, diff --git a/src/cross_section_measurement/01_get_ttjet_normalisation.py b/dps/analysis/xsection/01_get_ttjet_normalisation.py similarity index 95% rename from src/cross_section_measurement/01_get_ttjet_normalisation.py rename to dps/analysis/xsection/01_get_ttjet_normalisation.py index 290cd79c..d034a85f 100644 --- a/src/cross_section_measurement/01_get_ttjet_normalisation.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation.py @@ -17,19 +17,18 @@ ''' from __future__ import division from optparse import OptionParser -# from tools.ROOT_utils import set_root_defaults, get_histogram_from_file -import tools.ROOT_utils -from tools.logger import log -from config import XSectionConfig -from src.cross_section_measurement.lib import closure_tests -from tools.file_utilities import write_data_to_JSON -from tools.hist_utilities import clean_control_region, \ +from dps.utils.logger import log +from dps.config.xsection import XSectionConfig +from dps.analysis.xsection.lib import closure_tests +from dps.utils.file_utilities import write_data_to_JSON +from dps.utils.hist_utilities import clean_control_region, \ hist_to_value_error_tuplelist, fix_overflow import glob -import tools.measurement from copy import deepcopy -from tools.Calculation import combine_complex_results +from 
dps.utils.Calculation import combine_complex_results +from dps.utils.measurement import Measurement +from dps.utils.ROOT_utils import set_root_defaults # define logger for this module mylog = log["01b_get_ttjet_normalisation"] @@ -132,7 +131,7 @@ def background_subtraction(self, histograms): @mylog.trace() def simultaneous_fit(self, histograms): - from tools.Fitting import FitData, FitDataCollection, Minuit + from dps.utils.Fitting import FitData, FitDataCollection, Minuit print('not in production yet') fitter = None fit_data_collection = FitDataCollection() @@ -273,7 +272,7 @@ def main(): measurement_files = glob.glob(input_template.format(**inputs)) for f in sorted(measurement_files): print('Processing file ' + f) - measurement = tools.measurement.Measurement.fromJSON(f) + measurement = Measurement.fromJSON(f) # for each measurement norm = TTJetNormalisation( config=measurement_config, @@ -312,7 +311,7 @@ def get_category_from_file(json_file): return category if __name__ == '__main__': - tools.ROOT_utils.set_root_defaults() + set_root_defaults() options, args = parse_options() diff --git a/src/cross_section_measurement/02_unfold_and_measure.py b/dps/analysis/xsection/02_unfold_and_measure.py similarity index 98% rename from src/cross_section_measurement/02_unfold_and_measure.py rename to dps/analysis/xsection/02_unfold_and_measure.py index 9ce0117d..5f501985 100644 --- a/src/cross_section_measurement/02_unfold_and_measure.py +++ b/dps/analysis/xsection/02_unfold_and_measure.py @@ -3,20 +3,20 @@ from optparse import OptionParser # from array import array # rootpy -from rootpy.io import File, root_open +from rootpy.io import File from rootpy.plotting import Hist2D # DailyPythonScripts -import config.unfold as unfoldCfg -from config.variable_binning import bin_widths, bin_widths_visiblePS, reco_bin_edges_full, reco_bin_edges_vis -from config import XSectionConfig -from tools.Calculation import calculate_xsection, calculate_normalised_xsection, \ +import 
dps.config.unfold as unfoldCfg +from dps.config.variable_binning import bin_widths, bin_widths_visiblePS, reco_bin_edges_full, reco_bin_edges_vis +from dps.config.xsection import XSectionConfig +from dps.utils.Calculation import calculate_xsection, calculate_normalised_xsection, \ combine_complex_results -from tools.hist_utilities import hist_to_value_error_tuplelist, \ +from dps.utils.hist_utilities import hist_to_value_error_tuplelist, \ value_error_tuplelist_to_hist -from tools.Unfolding import Unfolding, get_unfold_histogram_tuple, removeFakes -from tools.file_utilities import read_data_from_JSON, write_data_to_JSON +from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple, removeFakes +from dps.utils.file_utilities import read_data_from_JSON, write_data_to_JSON from copy import deepcopy -from tools.ROOT_utils import set_root_defaults +from dps.utils.ROOT_utils import set_root_defaults # from ROOT import TGraph, TSpline3, TUnfoldDensity def unfold_results( results, category, channel, tau_value, h_truth, h_measured, h_response, h_fakes, method, visiblePS ): diff --git a/src/cross_section_measurement/03_calculate_systematics.py b/dps/analysis/xsection/03_calculate_systematics.py similarity index 94% rename from src/cross_section_measurement/03_calculate_systematics.py rename to dps/analysis/xsection/03_calculate_systematics.py index 69abff6b..fd5c03be 100644 --- a/src/cross_section_measurement/03_calculate_systematics.py +++ b/dps/analysis/xsection/03_calculate_systematics.py @@ -16,10 +16,14 @@ 3) + 4) for more fine-grained analysis ''' from optparse import OptionParser -from config import XSectionConfig -from config.variable_binning import bin_edges_vis -from tools.systematic import * -from tools.file_utilities import make_folder_if_not_exists +from dps.config.xsection import XSectionConfig +from dps.config.variable_binning import bin_edges_vis +from dps.utils.file_utilities import make_folder_if_not_exists +from dps.utils.systematic import 
append_PDF_uncertainties, print_dictionary,\ + get_normalised_cross_sections, get_symmetrised_systematic_uncertainty,\ + generate_covariance_matrices,\ + get_measurement_with_total_systematic_uncertainty,\ + write_normalised_xsection_measurement if __name__ == '__main__': ''' diff --git a/src/cross_section_measurement/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py similarity index 98% rename from src/cross_section_measurement/04_make_plots_matplotlib.py rename to dps/analysis/xsection/04_make_plots_matplotlib.py index c68523d7..01370fbf 100644 --- a/src/cross_section_measurement/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -4,33 +4,32 @@ import os, gc from copy import deepcopy -from config.latex_labels import variables_latex, measurements_latex, \ -b_tag_bins_latex, fit_variables_latex -from config.variable_binning import bin_edges_full, variable_bins_ROOT, variable_bins_visiblePS_ROOT, fit_variable_bin_edges,\ +from dps.config.latex_labels import variables_latex, measurements_latex, fit_variables_latex +from dps.config.variable_binning import bin_edges_full, variable_bins_ROOT, variable_bins_visiblePS_ROOT, fit_variable_bin_edges,\ bin_edges_vis -from config import XSectionConfig -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.hist_utilities import value_error_tuplelist_to_hist, \ +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import read_data_from_JSON, make_folder_if_not_exists +from dps.utils.hist_utilities import value_error_tuplelist_to_hist, \ value_tuplelist_to_hist, value_errors_tuplelist_to_graph, graph_to_value_errors_tuplelist from math import sqrt # rootpy & matplotlib from ROOT import kRed, kGreen, kMagenta, kBlue, kBlack -from tools.ROOT_utils import set_root_defaults +from dps.utils.ROOT_utils import set_root_defaults import matplotlib as mpl -from tools.plotting import get_best_max_y +from 
dps.utils.plotting import get_best_max_y mpl.use( 'agg' ) import rootpy.plotting.root2matplotlib as rplt import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec from matplotlib.ticker import MultipleLocator -from config import CMS -from tools.latex import setup_matplotlib +from dps.config import CMS +from dps.utils.latex import setup_matplotlib # latex, font, etc setup_matplotlib() import matplotlib.patches as mpatches -from tools.logger import log +from dps.utils.logger import log xsec_04_log = log["src/cross_section_measurement/04_make_plots_matplotlib"] @xsec_04_log.trace() @@ -287,7 +286,7 @@ def make_template_plots( histograms, category, channel ): @xsec_04_log.trace() def plot_fit_results( histograms, category, channel ): global variable, b_tag_bin, output_folder, phase_space - from tools.plotting import Histogram_properties, make_data_mc_comparison_plot + from dps.utils.plotting import Histogram_properties, make_data_mc_comparison_plot fit_variables = histograms.keys() variableBins = None diff --git a/src/cross_section_measurement/05_make_systematic_plots.py b/dps/analysis/xsection/05_make_systematic_plots.py similarity index 100% rename from src/cross_section_measurement/05_make_systematic_plots.py rename to dps/analysis/xsection/05_make_systematic_plots.py diff --git a/src/cross_section_measurement/05_make_tables.py b/dps/analysis/xsection/05_make_tables.py similarity index 97% rename from src/cross_section_measurement/05_make_tables.py rename to dps/analysis/xsection/05_make_tables.py index d42ce6d7..033b5411 100644 --- a/src/cross_section_measurement/05_make_tables.py +++ b/dps/analysis/xsection/05_make_tables.py @@ -1,13 +1,13 @@ from __future__ import division # the result of the division will be always a float from optparse import OptionParser from copy import deepcopy -from config.latex_labels import variables_latex, variables_NonLatex, measurements_latex, samples_latex, typical_systematics_latex, met_systematics_latex -from 
config.variable_binning import variable_bins_latex, variable_bins_ROOT, variable_bins_visiblePS_ROOT, variable_bins_visiblePS_latex, bin_edges_vis, bin_edges_full -from config import XSectionConfig -from tools.Calculation import getRelativeError -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists, read_xsection_measurement_results_with_errors -from tools.hist_utilities import values_and_errors_to_hist -from lib import read_normalisation, read_initial_normalisation +from dps.config.latex_labels import variables_latex, variables_NonLatex, measurements_latex, samples_latex, typical_systematics_latex, met_systematics_latex +from dps.config.variable_binning import variable_bins_latex, variable_bins_ROOT, variable_bins_visiblePS_ROOT, variable_bins_visiblePS_latex, bin_edges_vis, bin_edges_full +from dps.config.xsection import XSectionConfig +from dps.utils.Calculation import getRelativeError +from dps.utils.file_utilities import make_folder_if_not_exists, read_xsection_measurement_results_with_errors +from dps.utils.hist_utilities import values_and_errors_to_hist +from dps.analysis.xsection.lib import read_normalisation, read_initial_normalisation import math import os.path from numpy import median @@ -15,7 +15,7 @@ mpl.use( 'agg' ) import matplotlib.pyplot as plt import rootpy.plotting.root2matplotlib as rplt -from config import CMS +from dps.config import CMS import matplotlib.cm as cm # use full stpectrum, yet use white for less than vmin=1 events my_cmap = cm.get_cmap( 'jet' ) diff --git a/src/cross_section_measurement/05b_make_tables.py b/dps/analysis/xsection/05b_make_tables.py similarity index 92% rename from src/cross_section_measurement/05b_make_tables.py rename to dps/analysis/xsection/05b_make_tables.py index 7f9ba5e5..f5ee1def 100644 --- a/src/cross_section_measurement/05b_make_tables.py +++ b/dps/analysis/xsection/05b_make_tables.py @@ -1,9 +1,9 @@ #!/usr/bin/env python # encoding: utf-8 ''' 
-src.cross_section_measurement.05b_make_tables -- creates tables for documentation +dps.analysis.xsection.05b_make_tables -- creates tables for documentation -src.cross_section_measurement.05b_make_tables is a script to create +dps.analysis.xsection.05b_make_tables is a script to create @copyright: 2015 University of Bristol. All rights reserved. @@ -18,14 +18,13 @@ import os from optparse import OptionParser -from config.cross_section_config import XSectionConfig -from tools.table import PrintTable -from src.cross_section_measurement.lib import read_initial_normalisation,\ +from dps.config.xsection import XSectionConfig +from dps.utils.table import PrintTable +from dps.analysis.xsection.lib import read_initial_normalisation,\ read_unfolded_normalisation -from config.variable_binning import bin_edges_vis, bin_edges,\ - variable_bins_latex, variable_bins_visiblePS_latex +from dps.config.variable_binning import variable_bins_latex, variable_bins_visiblePS_latex -from tools.logger import log +from dps.utils.logger import log from math import ceil mylog = log["05_make_tables"] @@ -224,7 +223,7 @@ def process_values(values): if PROFILE: import cProfile import pstats - profile_filename = 'src.cross_section_measurement.05b_make_tables_profile.txt' + profile_filename = 'dps.analysis.xsection.05b_make_tables_profile.txt' cProfile.run('main()', profile_filename) statsfile = open("profile_stats.txt", "wb") p = pstats.Stats(profile_filename, stream=statsfile) diff --git a/src/cross_section_measurement/06_compare_energies.py b/dps/analysis/xsection/06_compare_energies.py similarity index 95% rename from src/cross_section_measurement/06_compare_energies.py rename to dps/analysis/xsection/06_compare_energies.py index 71e7ade7..014385e8 100644 --- a/src/cross_section_measurement/06_compare_energies.py +++ b/dps/analysis/xsection/06_compare_energies.py @@ -10,15 +10,15 @@ import matplotlib.pyplot as plt import rootpy.plotting.root2matplotlib as rplt -from 
src.cross_section_measurement.lib import read_xsection_measurement_results -from config import XSectionConfig -from config.variable_binning import bin_edges_full -from config.latex_labels import variables_latex -from config import CMS +from dps.analysis.xsection.lib import read_xsection_measurement_results +from dps.config.xsection import XSectionConfig +from dps.config.variable_binning import bin_edges_full +from dps.config.latex_labels import variables_latex +from dps.config import CMS from rootpy.plotting import Graph from ROOT import kRed, kMagenta, kBlue from matplotlib.ticker import MultipleLocator -from tools.ROOT_utils import set_root_defaults +from dps.utils.ROOT_utils import set_root_defaults output_formats = ['pdf', 'png'] diff --git a/src/cross_section_measurement/98_fit_cross_checks.py b/dps/analysis/xsection/98_fit_cross_checks.py similarity index 97% rename from src/cross_section_measurement/98_fit_cross_checks.py rename to dps/analysis/xsection/98_fit_cross_checks.py index baa8458f..0632c11d 100644 --- a/src/cross_section_measurement/98_fit_cross_checks.py +++ b/dps/analysis/xsection/98_fit_cross_checks.py @@ -1,8 +1,8 @@ -from config import CMS +from dps.config import CMS from optparse import OptionParser -from config import XSectionConfig -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.plotting import Histogram_properties +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import read_data_from_JSON, make_folder_if_not_exists +from dps.utils.plotting import Histogram_properties from matplotlib import pyplot as plt from matplotlib import rc @@ -12,8 +12,8 @@ import rootpy.plotting.root2matplotlib as rplt from rootpy.plotting import Hist2D import linecache -from config.variable_binning import variable_bins_ROOT -from config.latex_labels import samples_latex +from dps.config.variable_binning import variable_bins_ROOT +from dps.config.latex_labels import samples_latex def 
get_fit_results( variable, channel ): global path_to_JSON, category, met_type diff --git a/src/cross_section_measurement/98b_fit_cross_checks.py b/dps/analysis/xsection/98b_fit_cross_checks.py similarity index 92% rename from src/cross_section_measurement/98b_fit_cross_checks.py rename to dps/analysis/xsection/98b_fit_cross_checks.py index 37003fab..3e2b061c 100644 --- a/src/cross_section_measurement/98b_fit_cross_checks.py +++ b/dps/analysis/xsection/98b_fit_cross_checks.py @@ -1,17 +1,18 @@ from optparse import OptionParser -from config import XSectionConfig, fit_var_inputs -from config.variable_binning import bin_edges_vis -from lib import read_normalisation, closure_tests -# from tools.file_utilities import read_data_from_JSON -# from tools.plotting import Histogram_properties +# @BROKEN +from dps.config.xsection import XSectionConfig, fit_var_inputs +from dps.config.variable_binning import bin_edges_vis +from dps.analysis.xsection.lib import read_normalisation, closure_tests +# from dps.utils.file_utilities import read_data_from_JSON +# from dps.utils.plotting import Histogram_properties # from matplotlib import pyplot as plt # import rootpy.plotting.root2matplotlib as rplt -from tools.hist_utilities import value_error_tuplelist_to_hist, spread_x, \ +from dps.utils.hist_utilities import value_error_tuplelist_to_hist, spread_x, \ limit_range_y -from tools.plotting import compare_measurements, Histogram_properties -from config.latex_labels import fit_variables_latex, samples_latex -from src.cross_section_measurement.lib import read_initial_normalisation +from dps.utils.plotting import compare_measurements, Histogram_properties +from dps.config.latex_labels import fit_variables_latex, samples_latex +from dps.analysis.xsection.lib import read_initial_normalisation # import linecache # # from config.variable_binning import variable_bins_ROOT diff --git a/src/cross_section_measurement/98c_fit_cross_checks.py b/dps/analysis/xsection/98c_fit_cross_checks.py similarity 
index 96% rename from src/cross_section_measurement/98c_fit_cross_checks.py rename to dps/analysis/xsection/98c_fit_cross_checks.py index d20fe9f4..2c049141 100644 --- a/src/cross_section_measurement/98c_fit_cross_checks.py +++ b/dps/analysis/xsection/98c_fit_cross_checks.py @@ -5,16 +5,16 @@ ''' import os from optparse import OptionParser - -from config import XSectionConfig, fit_var_inputs, latex_labels -from src.cross_section_measurement.lib import closure_tests, read_fit_templates, \ +# @BROKEN +from dps.config.xsection import XSectionConfig, fit_var_inputs, latex_labels +from dps.analysis.xsection.lib import closure_tests, read_fit_templates, \ read_initial_normalisation -from tools.file_utilities import read_data_from_JSON, write_data_to_JSON -from config.variable_binning import variable_bins_ROOT, fit_variable_bin_edges, bin_edges_vis -from tools.Fitting import FitData, FitDataCollection, Minuit -from tools.hist_utilities import value_tuplelist_to_hist -from tools.plotting import Histogram_properties, compare_measurements -from config import CMS +from dps.utils.file_utilities import read_data_from_JSON, write_data_to_JSON +from dps.config.variable_binning import variable_bins_ROOT, fit_variable_bin_edges, bin_edges_vis +from dps.utils.Fitting import FitData, FitDataCollection, Minuit +from dps.utils.hist_utilities import value_tuplelist_to_hist +from dps.utils.plotting import Histogram_properties, compare_measurements +from dps.config import CMS from matplotlib import rc rc( 'font', **CMS.font ) rc( 'text', usetex = True ) diff --git a/src/cross_section_measurement/99_QCD_cross_checks.py b/dps/analysis/xsection/99_QCD_cross_checks.py similarity index 97% rename from src/cross_section_measurement/99_QCD_cross_checks.py rename to dps/analysis/xsection/99_QCD_cross_checks.py index 8d9f1fa3..03f7c5f6 100644 --- a/src/cross_section_measurement/99_QCD_cross_checks.py +++ b/dps/analysis/xsection/99_QCD_cross_checks.py @@ -1,12 +1,12 @@ -from config import CMS +from 
dps.config import CMS from optparse import OptionParser -from config.latex_labels import b_tag_bins_latex -from config.variable_binning import bin_edges_vis, variable_bins_ROOT -from config import XSectionConfig -from tools.ROOT_utils import get_histograms_from_files -from tools.file_utilities import read_data_from_JSON -from tools.plotting import Histogram_properties, make_control_region_comparison -from tools.hist_utilities import value_error_tuplelist_to_hist, rebin_asymmetric +from dps.config.latex_labels import b_tag_bins_latex +from dps.config.variable_binning import bin_edges_vis, variable_bins_ROOT +from dps.config.xsection import XSectionConfig +from dps.utils.ROOT_utils import get_histograms_from_files +from dps.utils.file_utilities import read_data_from_JSON +from dps.utils.plotting import Histogram_properties, make_control_region_comparison +from dps.utils.hist_utilities import value_error_tuplelist_to_hist, rebin_asymmetric from ROOT import Double from uncertainties import ufloat @@ -69,7 +69,7 @@ def do_shape_check(channel, control_region_1, control_region_2, variable, normal histograms = get_histograms_from_files([control_region_1], histogram_files) region_1_tmp = histograms[channel][control_region_1].Clone() - histograms['TTJet'][control_region_1].Clone() - histograms['V+Jets'][control_region_1].Clone() - histograms['SingleTop'][control_region_1].Clone() - region_1 = rebin_asymmetric(region_1_tmp, bin_edges[variable]) + region_1 = rebin_asymmetric(region_1_tmp, bin_edges_vis[variable]) fit_results_QCD = normalisation[variable]['QCD'] region_2 = value_error_tuplelist_to_hist(fit_results_QCD, bin_edges_vis[variable]) @@ -93,7 +93,7 @@ def do_shape_check(channel, control_region_1, control_region_2, variable, normal region_1 = rebin_asymmetric(region_1_tmp, bin_edges_vis[variable]) fit_results_QCD = normalisation[variable]['QCD'] - region_2 = value_error_tuplelist_to_hist(fit_results_QCD, bin_edges[variable]) + region_2 = 
value_error_tuplelist_to_hist(fit_results_QCD, bin_edges_vis[variable]) histogram_properties = Histogram_properties() histogram_properties.name = 'QCD_control_region_comparison_' + channel + '_' + variable + '_fits_with_noniso_' + b_tag_bin diff --git a/src/cross_section_measurement/README.md b/dps/analysis/xsection/README.md similarity index 100% rename from src/cross_section_measurement/README.md rename to dps/analysis/xsection/README.md diff --git a/src/__init__.py b/dps/analysis/xsection/__init__.py similarity index 100% rename from src/__init__.py rename to dps/analysis/xsection/__init__.py diff --git a/src/cross_section_measurement/approval_conditions.py b/dps/analysis/xsection/approval_conditions.py similarity index 97% rename from src/cross_section_measurement/approval_conditions.py rename to dps/analysis/xsection/approval_conditions.py index f41f1be9..7aa78a24 100644 --- a/src/cross_section_measurement/approval_conditions.py +++ b/dps/analysis/xsection/approval_conditions.py @@ -2,16 +2,16 @@ Approval conditions for TOP-15-013 ''' from __future__ import division -from tools.plotting import Histogram_properties, compare_histograms, Plot, \ +from dps.utils.plotting import Histogram_properties, compare_histograms, Plot, \ ErrorBand, compare_measurements -from tools.file_utilities import read_data_from_JSON -from tools.hist_utilities import value_error_tuplelist_to_hist,\ +from dps.utils.file_utilities import read_data_from_JSON +from dps.utils.hist_utilities import value_error_tuplelist_to_hist,\ clean_control_region, absolute, value_tuplelist_to_hist, spread_x,\ value_errors_tuplelist_to_graph -from config.variable_binning import bin_edges_vis -from config.latex_labels import variables_latex -from tools.ROOT_utils import get_histogram_from_tree -from config.cross_section_config import XSectionConfig +from dps.config.variable_binning import bin_edges_vis +from dps.config.latex_labels import variables_latex +from dps.utils.ROOT_utils import 
get_histogram_from_tree +from dps.config.xsection import XSectionConfig from collections import namedtuple @@ -432,7 +432,7 @@ def debug_last_bin(): if __name__ == '__main__': import sys if '-d' in sys.argv: - from tools.logger import log + from dps.utils.logger import log log.setLevel(log.DEBUG) compare_combine_before_after_unfolding(measurement='unfolded_normalisation') diff --git a/src/cross_section_measurement/compareQCDControlRegions.py b/dps/analysis/xsection/compareQCDControlRegions.py similarity index 97% rename from src/cross_section_measurement/compareQCDControlRegions.py rename to dps/analysis/xsection/compareQCDControlRegions.py index c4c7c759..2db1db7b 100644 --- a/src/cross_section_measurement/compareQCDControlRegions.py +++ b/dps/analysis/xsection/compareQCDControlRegions.py @@ -1,16 +1,13 @@ from optparse import OptionParser -from config.latex_labels import b_tag_bins_latex, samples_latex, channel_latex, \ - variables_latex, fit_variables_latex, control_plots_latex -from config.variable_binning import control_plots_bins -from config.histogram_colours import histogram_colours as colours -from config import XSectionConfig -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.plotting import make_data_mc_comparison_plot, Histogram_properties, \ -make_control_region_comparison -from rootpy.plotting import Hist -from tools.hist_utilities import prepare_histograms, clean_control_region, get_normalisation_error, get_fitted_normalisation -from tools.ROOT_utils import get_histograms_from_trees, set_root_defaults -from tools.latex import setup_matplotlib +from dps.config.latex_labels import samples_latex, variables_latex, control_plots_latex +from dps.config.variable_binning import control_plots_bins +from dps.config.histogram_colours import histogram_colours as colours +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import make_folder_if_not_exists +from dps.utils.plotting import 
Histogram_properties, make_control_region_comparison +from dps.utils.hist_utilities import prepare_histograms, clean_control_region +from dps.utils.ROOT_utils import get_histograms_from_trees, set_root_defaults +from dps.utils.latex import setup_matplotlib from uncertainties import ufloat # latex, font, etc diff --git a/src/cross_section_measurement/create_measurement.py b/dps/analysis/xsection/create_measurement.py similarity index 95% rename from src/cross_section_measurement/create_measurement.py rename to dps/analysis/xsection/create_measurement.py index 1a3c0dac..dfcdf93e 100644 --- a/src/cross_section_measurement/create_measurement.py +++ b/dps/analysis/xsection/create_measurement.py @@ -11,11 +11,12 @@ ''' from __future__ import print_function from optparse import OptionParser -import tools.measurement -from config import XSectionConfig, variable_binning -from tools.input import Input -from tools.logger import log +from dps.config.xsection import XSectionConfig +from dps.config import variable_binning +from dps.utils.input import Input +from dps.utils.logger import log from copy import deepcopy +from dps.utils.measurement import Measurement, Systematic # define logger for this module create_measurement_log = log["01b_get_ttjet_normalisation"] @@ -74,24 +75,24 @@ def create_measurement(com, category, variable, channel, phase_space, norm_metho m = None if category == 'central': - m = tools.measurement.Measurement(category) + m = Measurement(category) else: vjet_systematics = [config.vjets_theory_systematic_prefix + systematic for systematic in config.generator_systematics] if category in config.categories_and_prefixes.keys() or \ category in config.met_systematics_suffixes or \ category in vjet_systematics: - m = tools.measurement.Systematic(category, - stype=tools.measurement.Systematic.SHAPE, - affected_samples=config.samples) + m = Systematic(category, + stype=Systematic.SHAPE, + affected_samples=config.samples) elif category in 
config.rate_changing_systematics_names: m = config.rate_changing_systematics_values[category] elif category == 'QCD_shape': - m = tools.measurement.Systematic(category, - stype=tools.measurement.Systematic.SHAPE, - affected_samples=['QCD'], - ) + m = Systematic(category, + stype=Systematic.SHAPE, + affected_samples=['QCD'], + ) m.setVariable(variable) m.setCentreOfMassEnergy(com) @@ -163,7 +164,7 @@ def create_measurement(com, category, variable, channel, phase_space, norm_metho ), ) - m_qcd = tools.measurement.Measurement(category) + m_qcd = Measurement(category) m_qcd.setVariable(variable) m_qcd.setCentreOfMassEnergy(com) @@ -241,7 +242,7 @@ def create_measurement(com, category, variable, channel, phase_space, norm_metho if category in [config.vjets_theory_systematic_prefix + systematic for systematic in config.generator_systematics]: v_template_category = category.replace( config.vjets_theory_systematic_prefix, '') - m_vjets = tools.measurement.Measurement(category) + m_vjets = Measurement(category) m_vjets.setVariable(variable) m_vjets.setCentreOfMassEnergy(com) m_vjets.addSample( @@ -264,7 +265,7 @@ def create_measurement(com, category, variable, channel, phase_space, norm_metho path = base_path + '{category}.json' m.toJSON(path.format(**inputs)) else: - if m.type == tools.measurement.Systematic.SHAPE: + if m.type == Systematic.SHAPE: inputs['type'] = 'shape_systematic' else: inputs['type'] = 'rate_systematic' @@ -412,7 +413,7 @@ def create_input(config, sample, variable, category, channel, template, scale = 1. 
m = kwargs['measurement'] - if m.type == tools.measurement.Systematic.RATE: + if m.type == Systematic.RATE: if 'luminosity' in m.name: lumi_scale = lumi_scale * m.scale else: diff --git a/src/cross_section_measurement/lib.py b/dps/analysis/xsection/lib.py similarity index 97% rename from src/cross_section_measurement/lib.py rename to dps/analysis/xsection/lib.py index 1c75fb2a..af2f8160 100644 --- a/src/cross_section_measurement/lib.py +++ b/dps/analysis/xsection/lib.py @@ -1,14 +1,14 @@ # Library for all cross section measurement specific functions # that need to be shared between scripts from rootpy.io import File -from config.variable_binning import bin_edges_vis +from dps.config.variable_binning import bin_edges_vis -from tools.hist_utilities import value_error_tuplelist_to_hist, value_errors_tuplelist_to_graph -from tools.file_utilities import read_data_from_JSON -from tools.Timer import Timer +from dps.utils.hist_utilities import value_error_tuplelist_to_hist, value_errors_tuplelist_to_graph +from dps.utils.file_utilities import read_data_from_JSON +from dps.utils.Timer import Timer -from tools.logger import log -mylog = log["src.cross_section_measurement.lib"] +from dps.utils.logger import log +mylog = log["dps.analysis.xsection.lib"] closure_tests = { 'simple': {'V+Jets': 1.1, 'SingleTop': 1.2, 'TTJet': 1.3, 'QCD': 1.5}, diff --git a/src/cross_section_measurement/make_QCD_plots_fromTrees.py b/dps/analysis/xsection/make_QCD_plots_fromTrees.py similarity index 94% rename from src/cross_section_measurement/make_QCD_plots_fromTrees.py rename to dps/analysis/xsection/make_QCD_plots_fromTrees.py index f6f868eb..933c3f5c 100644 --- a/src/cross_section_measurement/make_QCD_plots_fromTrees.py +++ b/dps/analysis/xsection/make_QCD_plots_fromTrees.py @@ -1,12 +1,11 @@ from optparse import OptionParser -from config.latex_labels import b_tag_bins_latex, samples_latex, fit_variables_latex, fit_variables_units_latex, variables_latex, control_plots_latex -from 
config.variable_binning import fit_variable_bin_edges, bin_edges_vis, control_plots_bins -from config import XSectionConfig -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.plotting import make_data_mc_comparison_plot, Histogram_properties, \ -make_control_region_comparison -from tools.hist_utilities import prepare_histograms -from tools.ROOT_utils import get_histograms_from_files, set_root_defaults, get_histograms_from_trees +from dps.config.latex_labels import samples_latex, fit_variables_latex, fit_variables_units_latex, variables_latex, control_plots_latex +from dps.config.variable_binning import fit_variable_bin_edges, bin_edges_vis, control_plots_bins +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import make_folder_if_not_exists +from dps.utils.plotting import make_data_mc_comparison_plot, Histogram_properties +from dps.utils.hist_utilities import prepare_histograms +from dps.utils.ROOT_utils import set_root_defaults, get_histograms_from_trees channels = [ 'EPlusJets', diff --git a/src/cross_section_measurement/make_binning_plots.py b/dps/analysis/xsection/make_binning_plots.py similarity index 92% rename from src/cross_section_measurement/make_binning_plots.py rename to dps/analysis/xsection/make_binning_plots.py index 26a9b145..bb310041 100644 --- a/src/cross_section_measurement/make_binning_plots.py +++ b/dps/analysis/xsection/make_binning_plots.py @@ -9,18 +9,17 @@ mpl.use( 'agg' ) import matplotlib.pyplot as plt import rootpy.plotting.root2matplotlib as rplt -from config import CMS +from dps.config import CMS import matplotlib.cm as cm # from itertools import cycle -from config.latex_labels import b_tag_bins_latex, variables_latex -from config.variable_binning import bin_edges_full, bin_edges_vis -from config import XSectionConfig -from tools.ROOT_utils import get_histogram_from_file -from tools.file_utilities import make_folder_if_not_exists -from tools.file_utilities import 
read_data_from_JSON -from tools.hist_utilities import value_tuplelist_to_hist -from tools.Calculation import calculate_purities, calculate_stabilities -from tools.hist_utilities import rebin_2d +from dps.config.latex_labels import b_tag_bins_latex, variables_latex +from dps.config.variable_binning import bin_edges_full, bin_edges_vis +from dps.config.xsection import XSectionConfig +from dps.utils.ROOT_utils import get_histogram_from_file +from dps.utils.file_utilities import make_folder_if_not_exists +from dps.utils.hist_utilities import value_tuplelist_to_hist +from dps.utils.Calculation import calculate_purities, calculate_stabilities +from dps.utils.hist_utilities import rebin_2d from matplotlib import rc rc( 'font', **CMS.font ) diff --git a/src/cross_section_measurement/make_control_plots.py b/dps/analysis/xsection/make_control_plots.py similarity index 99% rename from src/cross_section_measurement/make_control_plots.py rename to dps/analysis/xsection/make_control_plots.py index 6846f7fe..36fb432d 100644 --- a/src/cross_section_measurement/make_control_plots.py +++ b/dps/analysis/xsection/make_control_plots.py @@ -1,14 +1,14 @@ from optparse import OptionParser -from config.latex_labels import b_tag_bins_latex, samples_latex, channel_latex, \ +from dps.config.latex_labels import b_tag_bins_latex, samples_latex, channel_latex, \ variables_latex, fit_variables_latex -from config.variable_binning import variable_bins_ROOT -from config import XSectionConfig -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.plotting import make_data_mc_comparison_plot, Histogram_properties, \ +from dps.config.variable_binning import variable_bins_ROOT +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import read_data_from_JSON, make_folder_if_not_exists +from dps.utils.plotting import make_data_mc_comparison_plot, Histogram_properties, \ make_control_region_comparison -from tools.hist_utilities import 
prepare_histograms, clean_control_region -from tools.ROOT_utils import get_histograms_from_files, set_root_defaults -from tools.latex import setup_matplotlib +from dps.utils.hist_utilities import prepare_histograms, clean_control_region +from dps.utils.ROOT_utils import get_histograms_from_files, set_root_defaults +from dps.utils.latex import setup_matplotlib # latex, font, etc setup_matplotlib() diff --git a/src/cross_section_measurement/make_control_plots_fromTrees.py b/dps/analysis/xsection/make_control_plots_fromTrees.py similarity index 98% rename from src/cross_section_measurement/make_control_plots_fromTrees.py rename to dps/analysis/xsection/make_control_plots_fromTrees.py index 03cc7083..5149b4f5 100644 --- a/src/cross_section_measurement/make_control_plots_fromTrees.py +++ b/dps/analysis/xsection/make_control_plots_fromTrees.py @@ -1,20 +1,16 @@ from optparse import OptionParser -from config.latex_labels import b_tag_bins_latex, samples_latex, channel_latex, \ +from dps.config.latex_labels import b_tag_bins_latex, samples_latex, channel_latex, \ variables_latex, fit_variables_latex, control_plots_latex -from config.variable_binning import fit_variable_bin_edges, control_plots_bins -from config.histogram_colours import histogram_colours as colours -from config import XSectionConfig -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.plotting import make_data_mc_comparison_plot, Histogram_properties, \ - make_control_region_comparison -from tools.plotting import make_plot as make_plot_tmp -from rootpy.plotting import Hist -from tools.hist_utilities import prepare_histograms, clean_control_region, get_normalisation_error, get_fitted_normalisation -from tools.ROOT_utils import get_histograms_from_trees, set_root_defaults -from tools.latex import setup_matplotlib +from dps.config.variable_binning import fit_variable_bin_edges, control_plots_bins +from dps.config.histogram_colours import histogram_colours as colours 
+from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import make_folder_if_not_exists +from dps.utils.plotting import make_data_mc_comparison_plot, Histogram_properties +from dps.utils.hist_utilities import prepare_histograms, clean_control_region +from dps.utils.ROOT_utils import get_histograms_from_trees, set_root_defaults +from dps.utils.latex import setup_matplotlib from uncertainties import ufloat -from math import sqrt -from copy import deepcopy +from dps.analysis.xsection.compareQCDControlRegions import getPUWeights # latex, font, etc setup_matplotlib() diff --git a/src/cross_section_measurement/make_cutflow_8TeV.py b/dps/analysis/xsection/make_cutflow_8TeV.py similarity index 97% rename from src/cross_section_measurement/make_cutflow_8TeV.py rename to dps/analysis/xsection/make_cutflow_8TeV.py index d28a30d4..148fd5a1 100644 --- a/src/cross_section_measurement/make_cutflow_8TeV.py +++ b/dps/analysis/xsection/make_cutflow_8TeV.py @@ -1,6 +1,6 @@ from math import sqrt -from tools.ROOT_utils import get_histograms_from_files -from config import XSectionConfig +from dps.utils.ROOT_utils import get_histograms_from_files +from dps.config.xsection import XSectionConfig cuts = None cuts_electrons = [ diff --git a/src/cross_section_measurement/make_fit_variable_plots.py b/dps/analysis/xsection/make_fit_variable_plots.py similarity index 97% rename from src/cross_section_measurement/make_fit_variable_plots.py rename to dps/analysis/xsection/make_fit_variable_plots.py index c2881c4b..ecd4d253 100644 --- a/src/cross_section_measurement/make_fit_variable_plots.py +++ b/dps/analysis/xsection/make_fit_variable_plots.py @@ -7,15 +7,15 @@ from copy import copy, deepcopy from optparse import OptionParser -from tools.ROOT_utils import get_histograms_from_files -from tools.hist_utilities import prepare_histograms, clean_control_region, spread_x -from tools.file_utilities import make_folder_if_not_exists -from tools.plotting import 
make_data_mc_comparison_plot, Histogram_properties, make_shape_comparison_plot,\ +from dps.utils.ROOT_utils import get_histograms_from_files +from dps.utils.hist_utilities import prepare_histograms, clean_control_region, spread_x +from dps.utils.file_utilities import make_folder_if_not_exists +from dps.utils.plotting import make_data_mc_comparison_plot, Histogram_properties, make_shape_comparison_plot,\ compare_measurements -from config.latex_labels import b_tag_bins_latex, samples_latex, channel_latex -from config.variable_binning import variable_bins_ROOT, fit_variable_bin_edges, bin_edges -from config import XSectionConfig -from tools.latex import setup_matplotlib +from dps.config.latex_labels import b_tag_bins_latex, samples_latex, channel_latex +from dps.config.variable_binning import variable_bins_ROOT, fit_variable_bin_edges, bin_edges_vis +from dps.config.xsection import XSectionConfig +from dps.utils.latex import setup_matplotlib # latex, font, etc setup_matplotlib() @@ -37,7 +37,7 @@ b_tag_bin_ctl = '0orMoreBtag' category = 'central' -variables = bin_edges.keys() +variables = bin_edges_vis.keys() save_as = ['pdf'] diff --git a/src/cross_section_measurement/make_new_physics_plots_8TeV.py b/dps/analysis/xsection/make_new_physics_plots_8TeV.py similarity index 96% rename from src/cross_section_measurement/make_new_physics_plots_8TeV.py rename to dps/analysis/xsection/make_new_physics_plots_8TeV.py index 271e727b..5170ca44 100644 --- a/src/cross_section_measurement/make_new_physics_plots_8TeV.py +++ b/dps/analysis/xsection/make_new_physics_plots_8TeV.py @@ -1,10 +1,11 @@ -from config import CMS, XSectionConfig +from dps.config import CMS +from dps.config.xsection import XSectionConfig from optparse import OptionParser -from tools.ROOT_utils import get_histograms_from_files -from tools.file_utilities import read_data_from_JSON -from tools.plotting import make_data_mc_comparison_plot, Histogram_properties -from tools.hist_utilities import prepare_histograms 
-from config.variable_binning import variable_bins_ROOT +from dps.utils.ROOT_utils import get_histograms_from_files +from dps.utils.file_utilities import read_data_from_JSON +from dps.utils.plotting import make_data_mc_comparison_plot, Histogram_properties +from dps.utils.hist_utilities import prepare_histograms +from dps.config.variable_binning import variable_bins_ROOT def get_fitted_normalisation(variable, channel): global path_to_JSON, category, met_type @@ -76,7 +77,7 @@ def get_normalisation_error(normalisation): CMS.axis_label_minor['labelsize'] = 40 CMS.legend_properties['size'] = 40 - from config.latex_labels import b_tag_bins_latex, samples_latex + from dps.config.latex_labels import b_tag_bins_latex, samples_latex histogram_files = { 'data' : measurement_config.data_file_electron, diff --git a/src/cross_section_measurement/make_rivet_hists.py b/dps/analysis/xsection/make_rivet_hists.py similarity index 93% rename from src/cross_section_measurement/make_rivet_hists.py rename to dps/analysis/xsection/make_rivet_hists.py index a423c75c..b90c87f6 100644 --- a/src/cross_section_measurement/make_rivet_hists.py +++ b/dps/analysis/xsection/make_rivet_hists.py @@ -14,11 +14,11 @@ data/absolute_eta_M3_angle_bl/{centre_of_mass_energy}TeV/{variable}/xsection_measurement_results/{channel}/central/ ''' from optparse import OptionParser -from config import XSectionConfig -from config.variable_binning import bin_edges_full -from tools.file_utilities import read_data_from_JSON +from dps.config.xsection import XSectionConfig +from dps.config.variable_binning import bin_edges_full +from dps.utils.file_utilities import read_data_from_JSON from rootpy.io import File -from tools.hist_utilities import value_error_tuplelist_to_hist,\ +from dps.utils.hist_utilities import value_error_tuplelist_to_hist,\ value_errors_tuplelist_to_graph diff --git a/src/cross_section_measurement/make_ttbarRecoPlots.py b/dps/analysis/xsection/make_ttbarRecoPlots.py similarity index 95% rename from 
src/cross_section_measurement/make_ttbarRecoPlots.py rename to dps/analysis/xsection/make_ttbarRecoPlots.py index 0808beba..717b8896 100644 --- a/src/cross_section_measurement/make_ttbarRecoPlots.py +++ b/dps/analysis/xsection/make_ttbarRecoPlots.py @@ -1,14 +1,13 @@ from optparse import OptionParser -from config.latex_labels import b_tag_bins_latex, samples_latex, channel_latex, \ - variables_latex, fit_variables_latex, control_plots_latex -from config.variable_binning import variable_bins_ROOT, bin_edges_vis, control_plots_bins -from config import XSectionConfig -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.plotting import make_data_mc_comparison_plot, Histogram_properties, \ -make_control_region_comparison -from tools.hist_utilities import prepare_histograms, clean_control_region, get_normalisation_error, get_fitted_normalisation -from tools.ROOT_utils import get_histograms_from_trees, set_root_defaults -from tools.latex import setup_matplotlib +from dps.config.latex_labels import b_tag_bins_latex, samples_latex, channel_latex, \ + variables_latex +from dps.config.variable_binning import bin_edges_vis +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import make_folder_if_not_exists +from dps.utils.plotting import make_data_mc_comparison_plot, Histogram_properties +from dps.utils.hist_utilities import prepare_histograms, get_normalisation_error, get_fitted_normalisation +from dps.utils.ROOT_utils import get_histograms_from_trees, set_root_defaults +from dps.utils.latex import setup_matplotlib # latex, font, etc setup_matplotlib() diff --git a/src/cross_section_measurement/__init__.py b/dps/analysis/zprime_analysis/__init__.py similarity index 100% rename from src/cross_section_measurement/__init__.py rename to dps/analysis/zprime_analysis/__init__.py diff --git a/src/zprime_analysis/estimate_QCD_rate.py b/dps/analysis/zprime_analysis/estimate_QCD_rate.py similarity index 97% rename from 
src/zprime_analysis/estimate_QCD_rate.py rename to dps/analysis/zprime_analysis/estimate_QCD_rate.py index 7fb64af2..5f348872 100644 --- a/src/zprime_analysis/estimate_QCD_rate.py +++ b/dps/analysis/zprime_analysis/estimate_QCD_rate.py @@ -9,8 +9,8 @@ In addition to above it should provide an easy way to present the binned estimates (MET, b-tag, other bins) ''' -from tools.QCD_rate_estimation import estimate_with_fit_to_relative_isolation -import tools.QCD_rate_estimation as QCD_rate_estimation +from dps.utils.QCD_rate_estimation import estimate_with_fit_to_relative_isolation +import dps.utils.QCD_rate_estimation as QCD_rate_estimation from rootpy.io import File if __name__ == "__main__": diff --git a/src/zprime_analysis/make_control_plots.py b/dps/analysis/zprime_analysis/make_control_plots.py similarity index 90% rename from src/zprime_analysis/make_control_plots.py rename to dps/analysis/zprime_analysis/make_control_plots.py index 1df93ee0..51830c57 100644 --- a/src/zprime_analysis/make_control_plots.py +++ b/dps/analysis/zprime_analysis/make_control_plots.py @@ -5,10 +5,10 @@ ''' -from config import CMS -from tools.ROOT_utils import get_histograms_from_files -from tools.plotting import make_data_mc_comparison_plot, Histogram_properties -from tools.hist_utilities import prepare_histograms +from dps.config import CMS +from dps.utils.ROOT_utils import get_histograms_from_files +from dps.utils.plotting import make_data_mc_comparison_plot, Histogram_properties +from dps.utils.hist_utilities import prepare_histograms if __name__ == '__main__': CMS.title['fontsize'] = 40 @@ -18,7 +18,7 @@ CMS.axis_label_minor['labelsize'] = 40 CMS.legend_properties['size'] = 40 - from config.latex_labels import b_tag_bins_latex, samples_latex + from dps.config.latex_labels import b_tag_bins_latex, samples_latex path_to_files = '/storage/TopQuarkGroup/results/histogramfiles/AN-11-265_V2/' lumi = 5028 diff --git a/src/zprime_analysis/make_control_region_plots.py 
b/dps/analysis/zprime_analysis/make_control_region_plots.py similarity index 98% rename from src/zprime_analysis/make_control_region_plots.py rename to dps/analysis/zprime_analysis/make_control_region_plots.py index 15039bac..c4eb55d8 100644 --- a/src/zprime_analysis/make_control_region_plots.py +++ b/dps/analysis/zprime_analysis/make_control_region_plots.py @@ -6,8 +6,8 @@ from matplotlib.ticker import MultipleLocator import matplotlib.pyplot as plt -from config import CMS -from tools.ROOT_utils import get_histograms_from_files, set_root_defaults +from dps.config import CMS +from dps.utils.ROOT_utils import get_histograms_from_files, set_root_defaults from copy import deepcopy diff --git a/src/zprime_analysis/make_control_region_plots_2.py b/dps/analysis/zprime_analysis/make_control_region_plots_2.py similarity index 90% rename from src/zprime_analysis/make_control_region_plots_2.py rename to dps/analysis/zprime_analysis/make_control_region_plots_2.py index e14ec0d3..7267d527 100644 --- a/src/zprime_analysis/make_control_region_plots_2.py +++ b/dps/analysis/zprime_analysis/make_control_region_plots_2.py @@ -1,11 +1,11 @@ -from config import CMS -from tools.ROOT_utils import get_histograms_from_files +from dps.config import CMS +from dps.utils.ROOT_utils import get_histograms_from_files -from tools.ROOT_utils import set_root_defaults +from dps.utils.ROOT_utils import set_root_defaults -from make_control_region_plots import make_control_region_comparison -from make_control_region_plots import make_control_region_data_mc_comparision -from make_control_region_plots import prepare_histograms +from .make_control_region_plots import make_control_region_comparison +from .make_control_region_plots import make_control_region_data_mc_comparision +from .make_control_region_plots import prepare_histograms if __name__ == '__main__': set_root_defaults() diff --git a/condor/README.md b/dps/condor/README.md similarity index 100% rename from condor/README.md rename to 
dps/condor/README.md diff --git a/condor/__init__.py b/dps/condor/__init__.py similarity index 100% rename from condor/__init__.py rename to dps/condor/__init__.py diff --git a/condor/job.py b/dps/condor/job.py similarity index 98% rename from condor/job.py rename to dps/condor/job.py index b21bfc14..caf39431 100644 --- a/condor/job.py +++ b/dps/condor/job.py @@ -2,7 +2,7 @@ ''' import pickle -from tools.file_utilities import make_folder_if_not_exists +from dps.utils.file_utilities import make_folder_if_not_exists import time import subprocess import getpass diff --git a/condor/job_template b/dps/condor/job_template similarity index 100% rename from condor/job_template rename to dps/condor/job_template diff --git a/condor/jobtypes/__init__.py b/dps/condor/jobtypes/__init__.py similarity index 100% rename from condor/jobtypes/__init__.py rename to dps/condor/jobtypes/__init__.py diff --git a/condor/jobtypes/create_toy_mc_from_tree_job.py b/dps/condor/jobtypes/create_toy_mc_from_tree_job.py similarity index 91% rename from condor/jobtypes/create_toy_mc_from_tree_job.py rename to dps/condor/jobtypes/create_toy_mc_from_tree_job.py index fbea4833..d8a4e50c 100644 --- a/condor/jobtypes/create_toy_mc_from_tree_job.py +++ b/dps/condor/jobtypes/create_toy_mc_from_tree_job.py @@ -1,8 +1,8 @@ ''' Condor jobs description for src/unfolding_tests/create_toy_mc_from_tree ''' -from condor import Job -from config.cross_section_config import XSectionConfig +from .. 
import Job +from dps.config.xsection import XSectionConfig class CreateToyMCFromTreeJob(Job): @@ -20,7 +20,7 @@ def __init__(self, output_folder, n_toy, n_input_mc, def run(self): - import src.unfolding_tests.create_toy_mc_from_tree as toy + import dps.analysis.unfolding_tests.create_toy_mc_from_tree as toy toy.generate_toy( n_toy=self.n_toy, n_input_mc=self.n_input_mc, @@ -56,7 +56,7 @@ def get_mapping(self, n): yield i, len(l[i:i + new_n]) def tar_output(self, job_id, subjob_id): - import src.unfolding_tests.create_toy_mc_from_tree as toy + import dps.analysis.unfolding_tests.create_toy_mc_from_tree as toy import shutil output_file = toy.get_output_file_name(self.output_folder, self.n_toy, diff --git a/condor/jobtypes/create_toy_mc_job.py b/dps/condor/jobtypes/create_toy_mc_job.py similarity index 95% rename from condor/jobtypes/create_toy_mc_job.py rename to dps/condor/jobtypes/create_toy_mc_job.py index 4cc6e9e1..9393f4be 100644 --- a/condor/jobtypes/create_toy_mc_job.py +++ b/dps/condor/jobtypes/create_toy_mc_job.py @@ -1,7 +1,7 @@ ''' Condor jobs description for src/unfolding_tests/create_toy_mc ''' -from condor import Job +from .. 
import Job class CreateToyMCJob(Job): @@ -21,7 +21,7 @@ def __init__(self, input_file, output_folder, variable, n_toy, self.additional_input_files = [input_file] def run(self): - import src.unfolding_tests.create_toy_mc as toy + import dps.analysis.unfolding_tests.create_toy_mc as toy toy.create_toy_mc(input_file=self.input_file, output_folder=self.output_folder, variable=self.variable, @@ -59,7 +59,7 @@ def get_mapping(self, n): yield i + 1, len(l[i:i + new_n]) def tar_output(self, job_id, subjob_id): - import src.unfolding_tests.create_toy_mc as toy + import dps.analysis.unfolding_tests.create_toy_mc as toy import shutil output_file = toy.get_output_file_name(self.output_folder, self.start_at, diff --git a/condor/jobtypes/produce_unfolding_hists_job.py b/dps/condor/jobtypes/produce_unfolding_hists_job.py similarity index 76% rename from condor/jobtypes/produce_unfolding_hists_job.py rename to dps/condor/jobtypes/produce_unfolding_hists_job.py index 6d5a8ba7..9bd85462 100644 --- a/condor/jobtypes/produce_unfolding_hists_job.py +++ b/dps/condor/jobtypes/produce_unfolding_hists_job.py @@ -1,9 +1,9 @@ -from condor import Job +from .. import Job class ProduceUnfoldingHistsJob(Job): ''' - Condor job class for src.produce_unfold_hists.py + Condor job class for dps.analysis.produce_unfold_hists.py ''' def __init__(self, params): diff --git a/condor/jobtypes/unfolding_pull_job.py b/dps/condor/jobtypes/unfolding_pull_job.py similarity index 95% rename from condor/jobtypes/unfolding_pull_job.py rename to dps/condor/jobtypes/unfolding_pull_job.py index 73df09ca..1ecc7a88 100644 --- a/condor/jobtypes/unfolding_pull_job.py +++ b/dps/condor/jobtypes/unfolding_pull_job.py @@ -1,12 +1,12 @@ ''' - Condor job for src.unfolding_tests.create_unfolding_pull_data + Condor job for dps.analysis.unfolding_tests.create_unfolding_pull_data ''' -from condor import Job +from .. 
import Job class UnfoldingPullJob(Job): ''' - Condor job for src.unfolding_tests.create_unfolding_pull_data + Condor job for dps.analysis.unfolding_tests.create_unfolding_pull_data ''' def __init__(self, input_file_name, method, channel, @@ -37,7 +37,7 @@ def run(self): ''' Run the workload ''' - import src.unfolding_tests.create_unfolding_pull_data as pull + import dps.analysis.unfolding_tests.create_unfolding_pull_data as pull pull.create_unfolding_pull_data(self.input_file_name, self.method, self.channel, self.centre_of_mass, self.variable, self.n_toy_mc, @@ -71,7 +71,7 @@ def split(self, n): In order not to double-count, use_n_toy needs to be reduced per job. ''' - import src.unfolding_tests.create_unfolding_pull_data as pull + import dps.analysis.unfolding_tests.create_unfolding_pull_data as pull if n == 1: return self run_matrix = pull.create_run_matrix(self.n_toy_mc, diff --git a/condor/jobtypes/unfolding_pull_job_new.py b/dps/condor/jobtypes/unfolding_pull_job_new.py similarity index 93% rename from condor/jobtypes/unfolding_pull_job_new.py rename to dps/condor/jobtypes/unfolding_pull_job_new.py index 29a6e668..e58998ef 100644 --- a/condor/jobtypes/unfolding_pull_job_new.py +++ b/dps/condor/jobtypes/unfolding_pull_job_new.py @@ -1,13 +1,13 @@ ''' - Condor job for src.unfolding_tests.create_unfolding_pull_data + Condor job for dps.analysis.unfolding_tests.create_unfolding_pull_data ''' -from condor import Job -from config.cross_section_config import XSectionConfig +from .. 
import Job +from dps.config.xsection import XSectionConfig class UnfoldingPullJob(Job): ''' - Condor job for src.unfolding_tests.create_unfolding_pull_data + Condor job for dps.analysis.unfolding_tests.create_unfolding_pull_data ''' def __init__(self, input_file_directory, method, channels, @@ -47,8 +47,8 @@ def run(self): ''' Run the workload ''' - import src.unfolding_tests.create_unfolding_pull_data as pull - from tools.ROOT_utils import set_root_defaults + import dps.analysis.unfolding_tests.create_unfolding_pull_data as pull + from dps.utils.ROOT_utils import set_root_defaults set_root_defaults(msg_ignore_level=3001) pulls_file_name = pull.create_unfolding_pull_data(self.input_file_name, self.method, @@ -62,7 +62,7 @@ def run(self): self.tau_value_to_run ) - # import src.unfolding_tests.make_unfolding_pull_plots as plots + # import dps.analysis.unfolding_tests.make_unfolding_pull_plots as plots # plots.makeAllPlots( # file_name = pulls_file_name, # output_directory_base = 'plots/unfolding_pulls' @@ -114,7 +114,6 @@ def tar_output(self, job_id, subjob_id): Creates a tar file from the output of the job ''' import tarfile - import os file_template = 'UnfoldingPullJob_{sample}' file_template += '_{com}TeV_{job_id}.{subjob_id}.tar.gz' output_file = file_template.format( diff --git a/condor/prepare_dps.sh b/dps/condor/prepare_dps.sh similarity index 100% rename from condor/prepare_dps.sh rename to dps/condor/prepare_dps.sh diff --git a/condor/run.sh b/dps/condor/run.sh similarity index 100% rename from condor/run.sh rename to dps/condor/run.sh diff --git a/condor/run_job b/dps/condor/run_job similarity index 96% rename from condor/run_job rename to dps/condor/run_job index 9781b7b1..398fc18f 100755 --- a/condor/run_job +++ b/dps/condor/run_job @@ -8,7 +8,7 @@ from optparse import OptionParser from jobtypes import * import shutil import os -from tools.file_utilities import make_folder_if_not_exists +from dps.utils.file_utilities import make_folder_if_not_exists # 
from unfolding_pull_job import UnfoldingPullJob parser = OptionParser(__doc__) diff --git a/condor/setup_dps_from_tar.sh b/dps/condor/setup_dps_from_tar.sh similarity index 100% rename from condor/setup_dps_from_tar.sh rename to dps/condor/setup_dps_from_tar.sh diff --git a/config/CMS.py b/dps/config/CMS.py similarity index 100% rename from config/CMS.py rename to dps/config/CMS.py diff --git a/dps/config/RooUnfold.py b/dps/config/RooUnfold.py new file mode 100644 index 00000000..f698a757 --- /dev/null +++ b/dps/config/RooUnfold.py @@ -0,0 +1,33 @@ +''' +Created on 31 Oct 2012 + +@author: kreczko +''' +availablemethods = [ + 'RooUnfoldTUnfold', + 'RooUnfoldBayes', + 'RooUnfoldSvd', + 'RooUnfoldBinByBin', + 'RooUnfoldInvert', + 'TSVDUnfold', + ] + +SVD_k_value = 5 +SVD_tau_value = -1 +SVD_n_toy = 1000 +# 0 = no error treatment: returns sqrt(N) +# 1 = bin-by-bin errors (diagonal covariance matrix) +# 2 = covariance matrix from unfolding +# 3 = covariance matrix from toy MC +error_treatment = 3 +Bayes_n_repeat = 4 + +unfolded_markerStyle = 20 +unfolded_fillStyle = 0 +unfolded_color = 'black' + +truth_color = 'red' +truth_fillStyle = 0 + +measured_color = 'blue' +measured_fillStyle = 0 diff --git a/src/cross_section_measurement/tests/__init__.py b/dps/config/__init__.py similarity index 100% rename from src/cross_section_measurement/tests/__init__.py rename to dps/config/__init__.py diff --git a/config/dataset_info_7TeV.py b/dps/config/dataset_info_7TeV.py similarity index 100% rename from config/dataset_info_7TeV.py rename to dps/config/dataset_info_7TeV.py diff --git a/config/dataset_info_8TeV.py b/dps/config/dataset_info_8TeV.py similarity index 100% rename from config/dataset_info_8TeV.py rename to dps/config/dataset_info_8TeV.py diff --git a/config/file_templates.py b/dps/config/file_templates.py similarity index 100% rename from config/file_templates.py rename to dps/config/file_templates.py diff --git a/config/histogram_colours.py 
b/dps/config/histogram_colours.py similarity index 100% rename from config/histogram_colours.py rename to dps/config/histogram_colours.py diff --git a/config/latex_labels.py b/dps/config/latex_labels.py similarity index 100% rename from config/latex_labels.py rename to dps/config/latex_labels.py diff --git a/config/merging/qcd_sample_shape_from_data_electron.json b/dps/config/merging/qcd_sample_shape_from_data_electron.json similarity index 100% rename from config/merging/qcd_sample_shape_from_data_electron.json rename to dps/config/merging/qcd_sample_shape_from_data_electron.json diff --git a/config/merging/qcd_sample_shape_from_data_muon.json b/dps/config/merging/qcd_sample_shape_from_data_muon.json similarity index 100% rename from config/merging/qcd_sample_shape_from_data_muon.json rename to dps/config/merging/qcd_sample_shape_from_data_muon.json diff --git a/config/met_systematics.py b/dps/config/met_systematics.py similarity index 100% rename from config/met_systematics.py rename to dps/config/met_systematics.py diff --git a/config/plots/HT_reco_gen_truth_comparison_8TeV.json b/dps/config/plots/HT_reco_gen_truth_comparison_8TeV.json similarity index 100% rename from config/plots/HT_reco_gen_truth_comparison_8TeV.json rename to dps/config/plots/HT_reco_gen_truth_comparison_8TeV.json diff --git a/config/plots/MET_7TeV_electron_channel.json b/dps/config/plots/MET_7TeV_electron_channel.json similarity index 100% rename from config/plots/MET_7TeV_electron_channel.json rename to dps/config/plots/MET_7TeV_electron_channel.json diff --git a/config/plots/MET_8TeV_electron_channel.json b/dps/config/plots/MET_8TeV_electron_channel.json similarity index 100% rename from config/plots/MET_8TeV_electron_channel.json rename to dps/config/plots/MET_8TeV_electron_channel.json diff --git a/config/plots/MET_factorisation_scale_comparison_8TeV.json b/dps/config/plots/MET_factorisation_scale_comparison_8TeV.json similarity index 100% rename from 
config/plots/MET_factorisation_scale_comparison_8TeV.json rename to dps/config/plots/MET_factorisation_scale_comparison_8TeV.json diff --git a/config/plots/MET_matching_threshold_comparison_8TeV.json b/dps/config/plots/MET_matching_threshold_comparison_8TeV.json similarity index 100% rename from config/plots/MET_matching_threshold_comparison_8TeV.json rename to dps/config/plots/MET_matching_threshold_comparison_8TeV.json diff --git a/config/plots/MET_reco_fake_comparison_8TeV.json b/dps/config/plots/MET_reco_fake_comparison_8TeV.json similarity index 100% rename from config/plots/MET_reco_fake_comparison_8TeV.json rename to dps/config/plots/MET_reco_fake_comparison_8TeV.json diff --git a/config/plots/MET_uncertainties/compare_central_to_electron_energy_down_8TeV_asym_bin_electron_channel.json b/dps/config/plots/MET_uncertainties/compare_central_to_electron_energy_down_8TeV_asym_bin_electron_channel.json similarity index 100% rename from config/plots/MET_uncertainties/compare_central_to_electron_energy_down_8TeV_asym_bin_electron_channel.json rename to dps/config/plots/MET_uncertainties/compare_central_to_electron_energy_down_8TeV_asym_bin_electron_channel.json diff --git a/config/plots/MET_uncertainties/compare_central_to_tau_energy_down_8TeV.json b/dps/config/plots/MET_uncertainties/compare_central_to_tau_energy_down_8TeV.json similarity index 100% rename from config/plots/MET_uncertainties/compare_central_to_tau_energy_down_8TeV.json rename to dps/config/plots/MET_uncertainties/compare_central_to_tau_energy_down_8TeV.json diff --git a/config/plots/MET_uncertainties/compare_central_to_tau_energy_down_8TeV_asym_bin.json b/dps/config/plots/MET_uncertainties/compare_central_to_tau_energy_down_8TeV_asym_bin.json similarity index 100% rename from config/plots/MET_uncertainties/compare_central_to_tau_energy_down_8TeV_asym_bin.json rename to dps/config/plots/MET_uncertainties/compare_central_to_tau_energy_down_8TeV_asym_bin.json diff --git 
a/config/plots/MET_uncertainties/compare_central_to_tau_energy_up_8TeV.json b/dps/config/plots/MET_uncertainties/compare_central_to_tau_energy_up_8TeV.json similarity index 100% rename from config/plots/MET_uncertainties/compare_central_to_tau_energy_up_8TeV.json rename to dps/config/plots/MET_uncertainties/compare_central_to_tau_energy_up_8TeV.json diff --git a/config/plots/MET_uncertainties/compare_central_to_tau_energy_up_8TeV_asym_bin.json b/dps/config/plots/MET_uncertainties/compare_central_to_tau_energy_up_8TeV_asym_bin.json similarity index 100% rename from config/plots/MET_uncertainties/compare_central_to_tau_energy_up_8TeV_asym_bin.json rename to dps/config/plots/MET_uncertainties/compare_central_to_tau_energy_up_8TeV_asym_bin.json diff --git a/config/plots/ST_reco_gen_truth_comparison_8TeV.json b/dps/config/plots/ST_reco_gen_truth_comparison_8TeV.json similarity index 100% rename from config/plots/ST_reco_gen_truth_comparison_8TeV.json rename to dps/config/plots/ST_reco_gen_truth_comparison_8TeV.json diff --git a/config/plots/mjj_13TeV_electron_channel.json b/dps/config/plots/mjj_13TeV_electron_channel.json similarity index 100% rename from config/plots/mjj_13TeV_electron_channel.json rename to dps/config/plots/mjj_13TeV_electron_channel.json diff --git a/config/plots/w_jets_7_and_8_tev_comparison.json b/dps/config/plots/w_jets_7_and_8_tev_comparison.json similarity index 100% rename from config/plots/w_jets_7_and_8_tev_comparison.json rename to dps/config/plots/w_jets_7_and_8_tev_comparison.json diff --git a/config/summations_7TeV.py b/dps/config/summations_7TeV.py similarity index 100% rename from config/summations_7TeV.py rename to dps/config/summations_7TeV.py diff --git a/config/summations_8TeV.py b/dps/config/summations_8TeV.py similarity index 100% rename from config/summations_8TeV.py rename to dps/config/summations_8TeV.py diff --git a/config/summations_common.py b/dps/config/summations_common.py similarity index 100% rename from 
config/summations_common.py rename to dps/config/summations_common.py diff --git a/config/unfold.py b/dps/config/unfold.py similarity index 100% rename from config/unfold.py rename to dps/config/unfold.py diff --git a/config/variableBranchNames.py b/dps/config/variableBranchNames.py similarity index 100% rename from config/variableBranchNames.py rename to dps/config/variableBranchNames.py diff --git a/config/variable_binning.py b/dps/config/variable_binning.py similarity index 100% rename from config/variable_binning.py rename to dps/config/variable_binning.py diff --git a/config/cross_section_config.py b/dps/config/xsection.py similarity index 98% rename from config/cross_section_config.py rename to dps/config/xsection.py index e2dc70ae..a46e0f7b 100644 --- a/config/cross_section_config.py +++ b/dps/config/xsection.py @@ -1,5 +1,5 @@ from __future__ import division -import tools.measurement +import dps.utils.measurement class XSectionConfig(): current_analysis_path = '/hdfs/TopQuarkGroup/run2/atOutput/' @@ -17,9 +17,6 @@ class XSectionConfig(): 'data_muon_category_templates', 'electron_QCD_MC_file', 'electron_control_region', 'electron_control_region_systematic', - 'fit_boundaries', - 'fit_variable_bin_width', - 'fit_variable_unit', 'general_category_templates', 'general_category_templates_trees', 'generator_systematic_vjets_templates', @@ -35,7 +32,7 @@ class XSectionConfig(): 'muon_control_region_systematic', 'new_luminosity', 'parameters', 'path_to_files', 'path_to_unfolding_histograms', 'rate_changing_systematics', - 'rebin', 'special_muon_histogram', 'translate_options', + 'special_muon_histogram', 'translate_options', 'ttbar_category_templates', 'ttbar_category_templates_trees', 'ttbar_generator_category_templates_trees', @@ -47,12 +44,9 @@ class XSectionConfig(): 'unfolding_matching_down', 'unfolding_matching_down_raw', 'unfolding_matching_up', 'unfolding_matching_up_raw', 'unfolding_mass_down', 'unfolding_mass_up', - 'unfolding_mcatnlo', 
'unfolding_mcatnlo_raw', - 'unfolding_powheg_pythia', 'unfolding_powheg_pythia_raw', 'unfolding_powheg_herwig', 'unfolding_powheg_herwig_raw', 'unfolding_scale_down', 'unfolding_scale_down_raw', 'unfolding_scale_up', 'unfolding_scale_up_raw', - 'unfolding_ptreweight', 'unfolding_ptreweight_raw', 'unfolding_pdfweights', 'vjets_theory_systematic_prefix' ] @@ -294,20 +288,20 @@ def __fill_defaults__( self ): if 'QCD' in systematic: affected_samples = ['QCD'] - sp = tools.measurement.Systematic( + sp = dps.utils.measurement.Systematic( systematic + '+', # systematic + '_up', - stype = tools.measurement.Systematic.RATE, + stype = dps.utils.measurement.Systematic.RATE, affected_samples = affected_samples, scale = 1 + self.rate_changing_systematics[systematic], ) scale = 1 - self.rate_changing_systematics[systematic] if scale <= 0: scale = 10e-5 - sm = tools.measurement.Systematic( + sm = dps.utils.measurement.Systematic( systematic + '-', # systematic + '_down', - stype = tools.measurement.Systematic.RATE, + stype = dps.utils.measurement.Systematic.RATE, affected_samples = affected_samples, scale = scale, ) diff --git a/examples/Bin_Centers.py b/dps/examples/Bin_Centers.py similarity index 95% rename from examples/Bin_Centers.py rename to dps/examples/Bin_Centers.py index 482e7db5..6d05e509 100644 --- a/examples/Bin_Centers.py +++ b/dps/examples/Bin_Centers.py @@ -4,7 +4,7 @@ @author: kreczko ''' -from tools.datapoint_position import get_bin_centers, barycenters, calculate_correct_x_coordinates +from dps.utils.datapoint_position import get_bin_centers, barycenters, calculate_correct_x_coordinates from rootpy.io import File from rootpy.plotting import Hist, Graph import rootpy.plotting.root2matplotlib as rplt diff --git a/examples/CMSStyleMatplotlib.py b/dps/examples/CMSStyleMatplotlib.py similarity index 98% rename from examples/CMSStyleMatplotlib.py rename to dps/examples/CMSStyleMatplotlib.py index 47dd37af..0dfc48f3 100644 --- a/examples/CMSStyleMatplotlib.py +++ 
b/dps/examples/CMSStyleMatplotlib.py @@ -7,7 +7,7 @@ from rootpy.plotting import Hist import rootpy.plotting.root2matplotlib as rplt import matplotlib.pyplot as plt -from config import CMS +from dps.config import CMS CMS.axis_label_major['labelsize'] = 40 CMS.title['fontsize'] = 40 # create a normal distribution diff --git a/examples/Curvefitter_example.py b/dps/examples/Curvefitter_example.py similarity index 92% rename from examples/Curvefitter_example.py rename to dps/examples/Curvefitter_example.py index 5a20460c..67f808ea 100644 --- a/examples/Curvefitter_example.py +++ b/dps/examples/Curvefitter_example.py @@ -3,8 +3,8 @@ @author: kreczko ''' - -from tools.Fitting import CurveFit +# @BROKEN +from dps.utils.Fitting import CurveFit import numpy as np from rootpy.plotting import Hist import rootpy.plotting.root2matplotlib as rplt diff --git a/src/lepton_scale_factors/__init__.py b/dps/examples/__init__.py similarity index 100% rename from src/lepton_scale_factors/__init__.py rename to dps/examples/__init__.py diff --git a/examples/forest/README.md b/dps/examples/forest/README.md similarity index 100% rename from examples/forest/README.md rename to dps/examples/forest/README.md diff --git a/examples/forest/random_tree.py b/dps/examples/forest/random_tree.py similarity index 100% rename from examples/forest/random_tree.py rename to dps/examples/forest/random_tree.py diff --git a/examples/forest/read_ntuples.py b/dps/examples/forest/read_ntuples.py similarity index 97% rename from examples/forest/read_ntuples.py rename to dps/examples/forest/read_ntuples.py index 3db84a19..441f98c2 100644 --- a/examples/forest/read_ntuples.py +++ b/dps/examples/forest/read_ntuples.py @@ -8,7 +8,7 @@ from rootpy.plotting import Hist import rootpy.plotting.root2matplotlib as rplt import matplotlib.pyplot as plt -from tools.plotting import make_plot, Histogram_properties +from dps.utils.plotting import make_plot, Histogram_properties if __name__ == '__main__': # file available on 
soolin: diff --git a/examples/hist_with_dataMCRatio_example.py b/dps/examples/hist_with_dataMCRatio_example.py similarity index 98% rename from examples/hist_with_dataMCRatio_example.py rename to dps/examples/hist_with_dataMCRatio_example.py index 2af32caa..083738dc 100644 --- a/examples/hist_with_dataMCRatio_example.py +++ b/dps/examples/hist_with_dataMCRatio_example.py @@ -5,7 +5,7 @@ import matplotlib.pyplot as plt import ROOT import matplotlib.gridspec as gridspec -from config import CMS +from dps.config import CMS # Setting this to True (default in rootpy) # changes how the histograms look in ROOT... ROOT.TH1.SetDefaultSumw2(False) diff --git a/examples/hist_with_dataMCRatio_example_pylab.py b/dps/examples/hist_with_dataMCRatio_example_pylab.py similarity index 98% rename from examples/hist_with_dataMCRatio_example_pylab.py rename to dps/examples/hist_with_dataMCRatio_example_pylab.py index 0d8377fd..2e73b478 100644 --- a/examples/hist_with_dataMCRatio_example_pylab.py +++ b/dps/examples/hist_with_dataMCRatio_example_pylab.py @@ -4,7 +4,7 @@ import rootpy.plotting.root2matplotlib as rplt import matplotlib.pyplot as plt import ROOT -from config import CMS +from dps.config import CMS from pylab import subplot # Setting this to True (default in rootpy) # changes how the histograms look in ROOT... 
diff --git a/examples/matplotlib_hist_example.py b/dps/examples/matplotlib_hist_example.py similarity index 95% rename from examples/matplotlib_hist_example.py rename to dps/examples/matplotlib_hist_example.py index bbfeaecd..37316ddf 100644 --- a/examples/matplotlib_hist_example.py +++ b/dps/examples/matplotlib_hist_example.py @@ -6,7 +6,7 @@ import rootpy.plotting.root2matplotlib as rplt import matplotlib.pyplot as plt from matplotlib.ticker import AutoMinorLocator -from tools.plotting import Histogram_properties, make_data_mc_comparison_plot, make_control_region_comparison +from dps.utils.plotting import Histogram_properties, make_data_mc_comparison_plot, make_control_region_comparison import ROOT # Setting this to True (default in rootpy) diff --git a/examples/plot_config/data_mc_comparison.json b/dps/examples/plot_config/data_mc_comparison.json similarity index 100% rename from examples/plot_config/data_mc_comparison.json rename to dps/examples/plot_config/data_mc_comparison.json diff --git a/examples/plot_config/w_jets_7_and_8_tev_comparison.json b/dps/examples/plot_config/w_jets_7_and_8_tev_comparison.json similarity index 100% rename from examples/plot_config/w_jets_7_and_8_tev_comparison.json rename to dps/examples/plot_config/w_jets_7_and_8_tev_comparison.json diff --git a/examples/plot_hist.py b/dps/examples/plot_hist.py similarity index 100% rename from examples/plot_hist.py rename to dps/examples/plot_hist.py diff --git a/examples/plot_matplotlib_graph.py b/dps/examples/plot_matplotlib_graph.py similarity index 100% rename from examples/plot_matplotlib_graph.py rename to dps/examples/plot_matplotlib_graph.py diff --git a/examples/plot_matplotlib_hist.py b/dps/examples/plot_matplotlib_hist.py similarity index 100% rename from examples/plot_matplotlib_hist.py rename to dps/examples/plot_matplotlib_hist.py diff --git a/examples/plots/AsymBinsExample.png b/dps/examples/plots/AsymBinsExample.png similarity index 100% rename from 
examples/plots/AsymBinsExample.png rename to dps/examples/plots/AsymBinsExample.png diff --git a/examples/plots/Bin_Centers.png b/dps/examples/plots/Bin_Centers.png similarity index 100% rename from examples/plots/Bin_Centers.png rename to dps/examples/plots/Bin_Centers.png diff --git a/examples/plots/CMSStyleMatplotlib.png b/dps/examples/plots/CMSStyleMatplotlib.png similarity index 100% rename from examples/plots/CMSStyleMatplotlib.png rename to dps/examples/plots/CMSStyleMatplotlib.png diff --git a/examples/plots/CurveFit.png b/dps/examples/plots/CurveFit.png similarity index 100% rename from examples/plots/CurveFit.png rename to dps/examples/plots/CurveFit.png diff --git a/examples/plots/RooUnfoldBayesExample.png b/dps/examples/plots/RooUnfoldBayesExample.png similarity index 100% rename from examples/plots/RooUnfoldBayesExample.png rename to dps/examples/plots/RooUnfoldBayesExample.png diff --git a/examples/plots/RooUnfoldExample.png b/dps/examples/plots/RooUnfoldExample.png similarity index 100% rename from examples/plots/RooUnfoldExample.png rename to dps/examples/plots/RooUnfoldExample.png diff --git a/examples/roofit/rf101_basics.py b/dps/examples/roofit/rf101_basics.py similarity index 100% rename from examples/roofit/rf101_basics.py rename to dps/examples/roofit/rf101_basics.py diff --git a/examples/roofit/rf501_simultaneouspdf.py b/dps/examples/roofit/rf501_simultaneouspdf.py similarity index 100% rename from examples/roofit/rf501_simultaneouspdf.py rename to dps/examples/roofit/rf501_simultaneouspdf.py diff --git a/examples/roofit/rf501_simultaneouspdf_with_data.py b/dps/examples/roofit/rf501_simultaneouspdf_with_data.py similarity index 100% rename from examples/roofit/rf501_simultaneouspdf_with_data.py rename to dps/examples/roofit/rf501_simultaneouspdf_with_data.py diff --git a/examples/roofit/roofit_advanced.py b/dps/examples/roofit/roofit_advanced.py similarity index 100% rename from examples/roofit/roofit_advanced.py rename to 
dps/examples/roofit/roofit_advanced.py diff --git a/examples/roofit/roofit_beginner.py b/dps/examples/roofit/roofit_beginner.py similarity index 100% rename from examples/roofit/roofit_beginner.py rename to dps/examples/roofit/roofit_beginner.py diff --git a/examples/roofit/roofit_simultanous.py b/dps/examples/roofit/roofit_simultanous.py similarity index 100% rename from examples/roofit/roofit_simultanous.py rename to dps/examples/roofit/roofit_simultanous.py diff --git a/examples/roofit/roofit_simultanous_all_data.py b/dps/examples/roofit/roofit_simultanous_all_data.py similarity index 100% rename from examples/roofit/roofit_simultanous_all_data.py rename to dps/examples/roofit/roofit_simultanous_all_data.py diff --git a/examples/root_std_map.py b/dps/examples/root_std_map.py similarity index 100% rename from examples/root_std_map.py rename to dps/examples/root_std_map.py diff --git a/examples/rootpy_asym_bins.py b/dps/examples/rootpy_asym_bins.py similarity index 100% rename from examples/rootpy_asym_bins.py rename to dps/examples/rootpy_asym_bins.py diff --git a/examples/rootpy_graph_bug_example.py b/dps/examples/rootpy_graph_bug_example.py similarity index 100% rename from examples/rootpy_graph_bug_example.py rename to dps/examples/rootpy_graph_bug_example.py diff --git a/examples/tmva/create_sample.py b/dps/examples/tmva/create_sample.py similarity index 100% rename from examples/tmva/create_sample.py rename to dps/examples/tmva/create_sample.py diff --git a/examples/tmva/tmva_merge.py b/dps/examples/tmva/tmva_merge.py similarity index 100% rename from examples/tmva/tmva_merge.py rename to dps/examples/tmva/tmva_merge.py diff --git a/examples/tmva/tmva_read.py b/dps/examples/tmva/tmva_read.py similarity index 100% rename from examples/tmva/tmva_read.py rename to dps/examples/tmva/tmva_read.py diff --git a/examples/tmva/tmva_train.py b/dps/examples/tmva/tmva_train.py similarity index 100% rename from examples/tmva/tmva_train.py rename to 
dps/examples/tmva/tmva_train.py diff --git a/experimental/Config.py b/dps/experimental/Config.py similarity index 100% rename from experimental/Config.py rename to dps/experimental/Config.py diff --git a/experimental/DougsBTagEff/makeBTagEfficiencies.py b/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py similarity index 100% rename from experimental/DougsBTagEff/makeBTagEfficiencies.py rename to dps/experimental/DougsBTagEff/makeBTagEfficiencies.py diff --git a/experimental/LAMBSFitter.C b/dps/experimental/LAMBSFitter.C similarity index 100% rename from experimental/LAMBSFitter.C rename to dps/experimental/LAMBSFitter.C diff --git a/experimental/README.md b/dps/experimental/README.md similarity index 100% rename from experimental/README.md rename to dps/experimental/README.md diff --git a/experimental/TMatrix.py b/dps/experimental/TMatrix.py similarity index 100% rename from experimental/TMatrix.py rename to dps/experimental/TMatrix.py diff --git a/experimental/W_studies/fitWPeak.py b/dps/experimental/W_studies/fitWPeak.py similarity index 100% rename from experimental/W_studies/fitWPeak.py rename to dps/experimental/W_studies/fitWPeak.py diff --git a/experimental/W_studies/fitWPeak_fromTree.py b/dps/experimental/W_studies/fitWPeak_fromTree.py similarity index 100% rename from experimental/W_studies/fitWPeak_fromTree.py rename to dps/experimental/W_studies/fitWPeak_fromTree.py diff --git a/src/search/__init__.py b/dps/experimental/__init__.py similarity index 100% rename from src/search/__init__.py rename to dps/experimental/__init__.py diff --git a/experimental/add_control_region.py b/dps/experimental/add_control_region.py similarity index 98% rename from experimental/add_control_region.py rename to dps/experimental/add_control_region.py index 024f6fd2..bfb5f2be 100644 --- a/experimental/add_control_region.py +++ b/dps/experimental/add_control_region.py @@ -4,7 +4,7 @@ @author: kreczko ''' from rootpy.io.file import root_open, File -from tools.ROOT_utils 
import root_mkdir +from dps.utils.ROOT_utils import root_mkdir import shutil import subprocess diff --git a/experimental/condor/01_fit.description b/dps/experimental/condor/01_fit.description similarity index 88% rename from experimental/condor/01_fit.description rename to dps/experimental/condor/01_fit.description index addf735c..5cc89249 100644 --- a/experimental/condor/01_fit.description +++ b/dps/experimental/condor/01_fit.description @@ -1,4 +1,4 @@ -Executable = experimental/condor/run_01.sh +Executable = dps/experimental/condor/run_01.sh Universe = vanilla Output = 01.job.$(cluster).$(process).out Error = 01.job.$(cluster).$(process).err diff --git a/experimental/condor/01b/01_fit.description b/dps/experimental/condor/01b/01_fit.description similarity index 87% rename from experimental/condor/01b/01_fit.description rename to dps/experimental/condor/01b/01_fit.description index f56b9ab3..dc14f64f 100644 --- a/experimental/condor/01b/01_fit.description +++ b/dps/experimental/condor/01b/01_fit.description @@ -1,4 +1,4 @@ -Executable = experimental/condor/01b/run_01.sh +Executable = dps/experimental/condor/01b/run_01.sh Universe = vanilla Output = 01b.job.$(cluster).$(process).out Error = 01b.job.$(cluster).$(process).err diff --git a/experimental/condor/01b/run01_forAllOptions.py b/dps/experimental/condor/01b/run01_forAllOptions.py similarity index 89% rename from experimental/condor/01b/run01_forAllOptions.py rename to dps/experimental/condor/01b/run01_forAllOptions.py index 4b538161..d34e29a9 100644 --- a/experimental/condor/01b/run01_forAllOptions.py +++ b/dps/experimental/condor/01b/run01_forAllOptions.py @@ -37,4 +37,4 @@ jobOption = jobOptions[options.jobNumber] print 'Running with options : ',jobOption -os.system('python src/cross_section_measurement/01_get_ttjet_normalisation.py %s' % jobOption ) +os.system('python dps/analysis/xsection/01_get_ttjet_normalisation.py %s' % jobOption ) diff --git a/experimental/condor/run_01.sh 
b/dps/experimental/condor/01b/run_01.sh similarity index 91% rename from experimental/condor/run_01.sh rename to dps/experimental/condor/01b/run_01.sh index d2303cde..50d7ea31 100644 --- a/experimental/condor/run_01.sh +++ b/dps/experimental/condor/01b/run_01.sh @@ -15,7 +15,7 @@ echo "DailyPythonScripts are set up" echo "Running payload" >&2 echo "Running payload" -time python experimental/condor/run01_forAllOptions.py -n $1 +time python dps/experimental/condor/run01_forAllOptions.py -n $1 echo "Done" ls diff --git a/experimental/condor/run01_forAllOptions.py b/dps/experimental/condor/run01_forAllOptions.py similarity index 87% rename from experimental/condor/run01_forAllOptions.py rename to dps/experimental/condor/run01_forAllOptions.py index 1bf0cd2c..1b6c14b6 100644 --- a/experimental/condor/run01_forAllOptions.py +++ b/dps/experimental/condor/run01_forAllOptions.py @@ -27,4 +27,4 @@ jobOption = jobOptions[options.jobNumber] print 'Running with options : ',jobOption -os.system('python src/cross_section_measurement/01_get_fit_results.py %s' % jobOption ) +os.system('python dps/analysis/xsection/01_get_fit_results.py %s' % jobOption ) diff --git a/experimental/condor/01b/run_01.sh b/dps/experimental/condor/run_01.sh similarity index 100% rename from experimental/condor/01b/run_01.sh rename to dps/experimental/condor/run_01.sh diff --git a/experimental/createCutflows.py b/dps/experimental/createCutflows.py similarity index 100% rename from experimental/createCutflows.py rename to dps/experimental/createCutflows.py diff --git a/experimental/createSFWeightPlots.py b/dps/experimental/createSFWeightPlots.py similarity index 100% rename from experimental/createSFWeightPlots.py rename to dps/experimental/createSFWeightPlots.py diff --git a/experimental/filterNtuple.py b/dps/experimental/filterNtuple.py similarity index 100% rename from experimental/filterNtuple.py rename to dps/experimental/filterNtuple.py diff --git a/experimental/fitTest/initial_values_electron_eta.py 
b/dps/experimental/fitTest/initial_values_electron_eta.py similarity index 100% rename from experimental/fitTest/initial_values_electron_eta.py rename to dps/experimental/fitTest/initial_values_electron_eta.py diff --git a/experimental/fitTest/initial_values_electron_eta_afterSimFit.py b/dps/experimental/fitTest/initial_values_electron_eta_afterSimFit.py similarity index 100% rename from experimental/fitTest/initial_values_electron_eta_afterSimFit.py rename to dps/experimental/fitTest/initial_values_electron_eta_afterSimFit.py diff --git a/experimental/fitTest/nesSimpleSimFit.py b/dps/experimental/fitTest/nesSimpleSimFit.py similarity index 99% rename from experimental/fitTest/nesSimpleSimFit.py rename to dps/experimental/fitTest/nesSimpleSimFit.py index 0142a345..02c62816 100644 --- a/experimental/fitTest/nesSimpleSimFit.py +++ b/dps/experimental/fitTest/nesSimpleSimFit.py @@ -4,7 +4,7 @@ @author: clement ''' import unittest -from tools.Fitting import Minuit, FitData, FitDataCollection +from dps.utils.Fitting import Minuit, FitData, FitDataCollection from rootpy.plotting import Hist, Canvas, Legend from math import sqrt from ROOT import TH1, gStyle, TVirtualPad, gROOT diff --git a/experimental/fitTest/newSimpleFitTest.py b/dps/experimental/fitTest/newSimpleFitTest.py similarity index 99% rename from experimental/fitTest/newSimpleFitTest.py rename to dps/experimental/fitTest/newSimpleFitTest.py index 2f57bed2..c87d7951 100644 --- a/experimental/fitTest/newSimpleFitTest.py +++ b/dps/experimental/fitTest/newSimpleFitTest.py @@ -4,7 +4,7 @@ @author: clement ''' import unittest -from tools.Fitting import Minuit, FitData, FitDataCollection +from dps.utils.Fitting import Minuit, FitData, FitDataCollection from rootpy.plotting import Hist, Canvas, Legend from math import sqrt from ROOT import TH1, gStyle, TVirtualPad diff --git a/experimental/fitTest/templates_electron.py b/dps/experimental/fitTest/templates_electron.py similarity index 100% rename from 
experimental/fitTest/templates_electron.py rename to dps/experimental/fitTest/templates_electron.py diff --git a/experimental/fitTest/templates_electron_eta.py b/dps/experimental/fitTest/templates_electron_eta.py similarity index 100% rename from experimental/fitTest/templates_electron_eta.py rename to dps/experimental/fitTest/templates_electron_eta.py diff --git a/experimental/getSkimEfficiency.py b/dps/experimental/getSkimEfficiency.py similarity index 95% rename from experimental/getSkimEfficiency.py rename to dps/experimental/getSkimEfficiency.py index abc286a2..5be0ccb3 100644 --- a/experimental/getSkimEfficiency.py +++ b/dps/experimental/getSkimEfficiency.py @@ -5,7 +5,7 @@ from ROOT import * import glob from legacy.fileInfo import getROOTFiles -from tools.ROOT_utils import get_histogram_from_file +from dps.utils.ROOT_utils import get_histogram_from_file pathToSkimHist = "topPairEPlusJetsSelectionAnalyser/consecutiveCuts_unweighted" diff --git a/experimental/getSystematicSummary.py b/dps/experimental/getSystematicSummary.py similarity index 88% rename from experimental/getSystematicSummary.py rename to dps/experimental/getSystematicSummary.py index d660c5dc..e255ef8e 100644 --- a/experimental/getSystematicSummary.py +++ b/dps/experimental/getSystematicSummary.py @@ -1,8 +1,9 @@ -from config import XSectionConfig +from dps.config.xsection import XSectionConfig from copy import deepcopy -from tools.file_utilities import read_xsection_measurement_results_with_errors -from config.variable_binning import variable_bins_latex -from tools.Calculation import calculate_lower_and_upper_PDFuncertainty, calculate_lower_and_upper_systematics +from dps.utils.file_utilities import read_xsection_measurement_results_with_errors +from dps.config.variable_binning import variable_bins_latex +# @BROKEN +from dps.utils.Calculation import calculate_lower_and_upper_PDFuncertainty, calculate_lower_and_upper_systematics from numpy import median measurement_config = XSectionConfig( 13 ) 
diff --git a/experimental/hist_to_numpy.py b/dps/experimental/hist_to_numpy.py similarity index 100% rename from experimental/hist_to_numpy.py rename to dps/experimental/hist_to_numpy.py diff --git a/experimental/howMuchLumiInNtuples.py b/dps/experimental/howMuchLumiInNtuples.py similarity index 100% rename from experimental/howMuchLumiInNtuples.py rename to dps/experimental/howMuchLumiInNtuples.py diff --git a/experimental/makeFitTable.py b/dps/experimental/makeFitTable.py similarity index 94% rename from experimental/makeFitTable.py rename to dps/experimental/makeFitTable.py index 0f27c6d7..406a99cf 100644 --- a/experimental/makeFitTable.py +++ b/dps/experimental/makeFitTable.py @@ -1,6 +1,7 @@ -from src.cross_section_measurement.lib import read_normalisation, read_fit_templates, read_initial_normalisation, closure_tests -from config.variable_binning import bin_edges -from config.latex_labels import samples_latex +from dps.analysis.xsection.lib import read_normalisation, read_fit_templates, read_initial_normalisation, closure_tests +# @BROKEN +from dps.config.variable_binning import bin_edges +from dps.config.latex_labels import samples_latex # All possible options diff --git a/experimental/make_closure_test_plots.py b/dps/experimental/make_closure_test_plots.py similarity index 95% rename from experimental/make_closure_test_plots.py rename to dps/experimental/make_closure_test_plots.py index f5b3f904..efa3d76c 100644 --- a/experimental/make_closure_test_plots.py +++ b/dps/experimental/make_closure_test_plots.py @@ -1,6 +1,6 @@ -from src.cross_section_measurement.lib import read_normalisation, read_fit_templates, read_initial_normalisation, closure_tests +from dps.analysis.xsection.lib import read_normalisation, read_fit_templates, read_initial_normalisation, closure_tests from rootpy.plotting import Hist -from tools.plotting import make_data_mc_comparison_plot, Histogram_properties +from dps.utils.plotting import make_data_mc_comparison_plot, 
Histogram_properties fit_variable_properties = { diff --git a/experimental/mergeJob.sh b/dps/experimental/mergeJob.sh similarity index 100% rename from experimental/mergeJob.sh rename to dps/experimental/mergeJob.sh diff --git a/experimental/mergeROOTFilesWithCompression.py b/dps/experimental/mergeROOTFilesWithCompression.py similarity index 100% rename from experimental/mergeROOTFilesWithCompression.py rename to dps/experimental/mergeROOTFilesWithCompression.py diff --git a/experimental/merge_samples_7TeV.py b/dps/experimental/merge_samples_7TeV.py similarity index 94% rename from experimental/merge_samples_7TeV.py rename to dps/experimental/merge_samples_7TeV.py index b6b6b2d5..8ea519d7 100644 --- a/experimental/merge_samples_7TeV.py +++ b/dps/experimental/merge_samples_7TeV.py @@ -1,8 +1,8 @@ -from config.summations_7TeV import sample_summations -from config import XSectionConfig -from tools.file_utilities import make_folder_if_not_exists +from dps.config.summations_7TeV import sample_summations +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import make_folder_if_not_exists -from tools.file_utilities import merge_ROOT_files +from dps.utils.file_utilities import merge_ROOT_files import os import subprocess import time diff --git a/experimental/merge_samples_8TeV.py b/dps/experimental/merge_samples_8TeV.py similarity index 94% rename from experimental/merge_samples_8TeV.py rename to dps/experimental/merge_samples_8TeV.py index 6729c5f8..d1fddb7f 100644 --- a/experimental/merge_samples_8TeV.py +++ b/dps/experimental/merge_samples_8TeV.py @@ -1,8 +1,8 @@ -from config.summations_8TeV import sample_summations -from config import XSectionConfig -from tools.file_utilities import make_folder_if_not_exists +from dps.config.summations_8TeV import sample_summations +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import make_folder_if_not_exists -from tools.file_utilities import merge_ROOT_files +from 
dps.utils.file_utilities import merge_ROOT_files import os import subprocess import time diff --git a/experimental/merge_samples_onDICE.py b/dps/experimental/merge_samples_onDICE.py similarity index 93% rename from experimental/merge_samples_onDICE.py rename to dps/experimental/merge_samples_onDICE.py index 6f9d3dac..9f4d7f2b 100755 --- a/experimental/merge_samples_onDICE.py +++ b/dps/experimental/merge_samples_onDICE.py @@ -1,15 +1,14 @@ #!/usr/bin/env python -from config.summations_7TeV import sample_summations as sample_summations_7TeV -from config.summations_8TeV import sample_summations as sample_summations_8TeV -from config import XSectionConfig -from tools.file_utilities import make_folder_if_not_exists +from dps.config.summations_7TeV import sample_summations as sample_summations_7TeV +from dps.config.summations_8TeV import sample_summations as sample_summations_8TeV +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import make_folder_if_not_exists from optparse import OptionParser -from tools.file_utilities import merge_ROOT_files +from dps.utils.file_utilities import merge_ROOT_files import os import sys import subprocess -import time parser = OptionParser("Merge histogram files on DICE") parser.add_option("-n", dest="jobNumber", default=-1, type='int', diff --git a/experimental/minuit.py b/dps/experimental/minuit.py similarity index 96% rename from experimental/minuit.py rename to dps/experimental/minuit.py index eea0ba27..4758bd27 100644 --- a/experimental/minuit.py +++ b/dps/experimental/minuit.py @@ -1,5 +1,5 @@ from iminuit import describe -from tools.Fitting import IMinuit, FitData, FitDataCollection +from dps.utils.Fitting import IMinuit, FitData, FitDataCollection from rootpy.plotting import Hist import numpy as np diff --git a/experimental/move_BAT_output_files_to_hdfs.py b/dps/experimental/move_BAT_output_files_to_hdfs.py similarity index 96% rename from experimental/move_BAT_output_files_to_hdfs.py rename to 
dps/experimental/move_BAT_output_files_to_hdfs.py index 19ca0461..39c73d52 100644 --- a/experimental/move_BAT_output_files_to_hdfs.py +++ b/dps/experimental/move_BAT_output_files_to_hdfs.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -from config import XSectionConfig -from tools.file_utilities import make_folder_if_not_exists +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import make_folder_if_not_exists from optparse import OptionParser import os diff --git a/experimental/parsing_script.py b/dps/experimental/parsing_script.py similarity index 100% rename from experimental/parsing_script.py rename to dps/experimental/parsing_script.py diff --git a/experimental/parsing_script_events_number.py b/dps/experimental/parsing_script_events_number.py similarity index 100% rename from experimental/parsing_script_events_number.py rename to dps/experimental/parsing_script_events_number.py diff --git a/experimental/plotCrossSections.py b/dps/experimental/plotCrossSections.py similarity index 98% rename from experimental/plotCrossSections.py rename to dps/experimental/plotCrossSections.py index d5bfe511..df170dc0 100644 --- a/experimental/plotCrossSections.py +++ b/dps/experimental/plotCrossSections.py @@ -1,10 +1,11 @@ from optparse import OptionParser -import tools.plotting_utilities as plotting +import dps.utils.plotting_utilities as plotting import os -from config.variable_binning_8TeV import bin_edges, variable_bins_ROOT, eta_bin_edges -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists -from tools.hist_utilities import value_error_tuplelist_to_hist, value_tuplelist_to_hist -from tools.Calculation import calculateTotalUncertainty, symmetriseErrors +# @BROKEN +from dps.config.variable_binning_8TeV import bin_edges, variable_bins_ROOT, eta_bin_edges +from dps.utils.file_utilities import read_data_from_JSON, make_folder_if_not_exists +from dps.utils.hist_utilities import value_error_tuplelist_to_hist, 
value_tuplelist_to_hist +from dps.utils.Calculation import calculateTotalUncertainty, symmetriseErrors import ROOT from ROOT import TPaveText, kRed, TH1F, Double, TMinuit, Long, kGreen, gROOT, TCanvas, kMagenta, kBlue, TGraphAsymmErrors, TMath from ROOT import kAzure, kYellow, kViolet, THStack, gStyle diff --git a/experimental/plot_PU_weights.py b/dps/experimental/plot_PU_weights.py similarity index 100% rename from experimental/plot_PU_weights.py rename to dps/experimental/plot_PU_weights.py diff --git a/experimental/printCrossSections.py b/dps/experimental/printCrossSections.py similarity index 96% rename from experimental/printCrossSections.py rename to dps/experimental/printCrossSections.py index 12c1a779..647d2e78 100644 --- a/experimental/printCrossSections.py +++ b/dps/experimental/printCrossSections.py @@ -1,9 +1,10 @@ import ROOT from optparse import OptionParser -from config.variable_binning_8TeV import variable_bins_ROOT, variable_bins_latex -from config.met_systematics import metsystematics_sources, metsystematics_sources_latex -from tools.Calculation import getRelativeError, symmetriseErrors, calculateTotalUncertainty -from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists +# @BROKEN +from dps.config.variable_binning_8TeV import variable_bins_ROOT, variable_bins_latex +from dps.config.met_systematics import metsystematics_sources, metsystematics_sources_latex +from dps.utils.Calculation import getRelativeError, symmetriseErrors, calculateTotalUncertainty +from dps.utils.file_utilities import read_data_from_JSON, make_folder_if_not_exists from math import sqrt categories = [ 'central', 'matchingup', 'matchingdown', 'scaleup', 'scaledown', 'BJet_down', 'BJet_up', 'JES_down', 'JES_up', 'LightJet_down', 'LightJet_up', 'PU_down', 'PU_up' ] diff --git a/experimental/probfit/basics.py b/dps/experimental/probfit/basics.py similarity index 100% rename from experimental/probfit/basics.py rename to dps/experimental/probfit/basics.py diff 
--git a/experimental/probfit/test.py b/dps/experimental/probfit/test.py similarity index 100% rename from experimental/probfit/test.py rename to dps/experimental/probfit/test.py diff --git a/experimental/roofit_expert.py b/dps/experimental/roofit_expert.py similarity index 98% rename from experimental/roofit_expert.py rename to dps/experimental/roofit_expert.py index 53b5b27c..a2c1ef88 100644 --- a/experimental/roofit_expert.py +++ b/dps/experimental/roofit_expert.py @@ -6,11 +6,11 @@ from rootpy.io import File from ROOT import RooFit, RooRealVar, RooDataHist, RooArgList, RooHistPdf, RooArgSet, RooAddPdf from ROOT import RooChi2Var, RooFormulaVar, RooMinuit, TCanvas, RooPlot, RooGaussian, RooProdPdf, RooLinkedList -from config.variable_binning import variable_bins_ROOT -from tools.Calculation import decombine_result +from dps.config.variable_binning import variable_bins_ROOT +from dps.utils.Calculation import decombine_result from uncertainties import ufloat -from config import XSectionConfig -from config.summations_common import b_tag_summations +from dps.config.xsection import XSectionConfig +from dps.config.summations_common import b_tag_summations # copied from 01_get_fit_results.py def get_histogram(input_file, histogram_path, b_tag_bin=''): @@ -163,7 +163,7 @@ def get_fitted_normalisation_from_ROOT(channel, input_files, variable, met_type, leptonAbsEta = RooRealVar("leptonAbsEta", "leptonAbsEta", 0., 2.4) - # this has to move to tools/Fitting.py + # this has to move to dps.utils.Fitting.py vars = RooArgList() vars.add(leptonAbsEta) vars_set = RooArgSet() diff --git a/experimental/roofit_god.py b/dps/experimental/roofit_god.py similarity index 100% rename from experimental/roofit_god.py rename to dps/experimental/roofit_god.py diff --git a/experimental/rootpy_converty_hist_type.py b/dps/experimental/rootpy_converty_hist_type.py similarity index 100% rename from experimental/rootpy_converty_hist_type.py rename to dps/experimental/rootpy_converty_hist_type.py 
diff --git a/experimental/scale_mc.py b/dps/experimental/scale_mc.py similarity index 89% rename from experimental/scale_mc.py rename to dps/experimental/scale_mc.py index e88d18a6..e4fed553 100644 --- a/experimental/scale_mc.py +++ b/dps/experimental/scale_mc.py @@ -8,9 +8,9 @@ Assume that the first part of the name is the process in question (and implement exceptions) ''' from argparse import ArgumentParser -from tools.file_utilities import get_process_from_file -from config.dataset_info_7TeV import dataset_info as dataset_info_7TeV -from config.dataset_info_8TeV import dataset_info as dataset_info_8TeV +from dps.utils.file_utilities import get_process_from_file +from dps.config.dataset_info_7TeV import dataset_info as dataset_info_7TeV +from dps.config.dataset_info_8TeV import dataset_info as dataset_info_8TeV def scale_file(file_in_path): process_name = get_process_from_file(file_in_path) diff --git a/experimental/submitMerge.description b/dps/experimental/submitMerge.description similarity index 100% rename from experimental/submitMerge.description rename to dps/experimental/submitMerge.description diff --git a/experimental/sum_files_and_histograms.py b/dps/experimental/sum_files_and_histograms.py similarity index 98% rename from experimental/sum_files_and_histograms.py rename to dps/experimental/sum_files_and_histograms.py index 3aaa8554..1bed45a6 100644 --- a/experimental/sum_files_and_histograms.py +++ b/dps/experimental/sum_files_and_histograms.py @@ -12,7 +12,7 @@ from rootpy.logger import logging from ROOT import TFile, gROOT from argparse import ArgumentParser -from tools.file_utilities import make_folder_if_not_exists, get_files_in_path, merge_ROOT_files, get_process_from_file +from dps.utils.file_utilities import make_folder_if_not_exists, get_files_in_path, merge_ROOT_files, get_process_from_file File = TFile.Open gcd = gROOT.cd diff --git a/experimental/sums.py b/dps/experimental/sums.py similarity index 96% rename from experimental/sums.py rename 
to dps/experimental/sums.py index 4caf585c..3a08dfc9 100644 --- a/experimental/sums.py +++ b/dps/experimental/sums.py @@ -3,7 +3,7 @@ @author: phxlk ''' -from tools.file_utilities import read_data_from_JSON +from dps.utils.file_utilities import read_data_from_JSON if __name__ == '__main__': JSON_input_file = 'data/absolute_eta_M3_angle_bl/7TeV/HT/fit_results/central/fit_results_muon_patType1CorrectedPFMet.txt' diff --git a/experimental/tau_value_determination.py b/dps/experimental/tau_value_determination.py similarity index 96% rename from experimental/tau_value_determination.py rename to dps/experimental/tau_value_determination.py index 8e4277cd..ef92e809 100644 --- a/experimental/tau_value_determination.py +++ b/dps/experimental/tau_value_determination.py @@ -19,13 +19,14 @@ from copy import deepcopy from ROOT import Double, TH1F, TGraph -from config.variable_binning import bin_edges -from tools.file_utilities import read_data_from_JSON -from tools.hist_utilities import value_error_tuplelist_to_hist -from tools.Unfolding import Unfolding, get_unfold_histogram_tuple -from tools.ROOT_utils import set_root_defaults +# @BROKEN +from dps.config.variable_binning import bin_edges +from dps.utils.file_utilities import read_data_from_JSON +from dps.utils.hist_utilities import value_error_tuplelist_to_hist +from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple +from dps.utils.ROOT_utils import set_root_defaults # from examples.Bin_Centers import nbins -from config import XSectionConfig +from dps.config.xsection import XSectionConfig used_k = 2 font = {'family' : 'normal', diff --git a/experimental/testJson.txt b/dps/experimental/testJson.txt similarity index 100% rename from experimental/testJson.txt rename to dps/experimental/testJson.txt diff --git a/experimental/ttbar_reco.py b/dps/experimental/ttbar_reco.py similarity index 100% rename from experimental/ttbar_reco.py rename to dps/experimental/ttbar_reco.py diff --git 
a/experimental/unfoldAndMeasure_2012.py b/dps/experimental/unfoldAndMeasure_2012.py similarity index 97% rename from experimental/unfoldAndMeasure_2012.py rename to dps/experimental/unfoldAndMeasure_2012.py index 2d98378c..56146897 100644 --- a/experimental/unfoldAndMeasure_2012.py +++ b/dps/experimental/unfoldAndMeasure_2012.py @@ -9,12 +9,12 @@ from rootpy.io import File from rootpy.plotting import Hist, Hist2D # DailyPythonScripts -from config.variable_binning_8TeV import bin_widths, bin_edges -from tools.Calculation import calculate_xsection, calculate_normalised_xsection -from tools.hist_utilities import hist_to_value_error_tuplelist, value_error_tuplelist_to_hist -from tools.Unfolding import Unfolding -from tools.file_utilities import read_data_from_JSON, write_data_to_JSON, make_folder_if_not_exists -import config.RooUnfold as unfoldCfg +from dps.config.variable_binning_8TeV import bin_widths, bin_edges +from dps.utils.Calculation import calculate_xsection, calculate_normalised_xsection +from dps.utils.hist_utilities import hist_to_value_error_tuplelist, value_error_tuplelist_to_hist +from dps.utils.Unfolding import Unfolding +from dps.utils.file_utilities import read_data_from_JSON, write_data_to_JSON, make_folder_if_not_exists +import dps.config.RooUnfold as unfoldCfg luminosity = 5814 ttbar_xsection = 225.19 diff --git a/experimental/unfolding_pull_parallel.py b/dps/experimental/unfolding_pull_parallel.py similarity index 97% rename from experimental/unfolding_pull_parallel.py rename to dps/experimental/unfolding_pull_parallel.py index 449c6bc0..0c5065e5 100644 --- a/experimental/unfolding_pull_parallel.py +++ b/dps/experimental/unfolding_pull_parallel.py @@ -7,9 +7,9 @@ import os from rootpy.io import File from array import array -from tools.Unfolding import Unfolding -from config import RooUnfold -from tools.file_utilities import write_data_to_JSON +from dps.utils.Unfolding import Unfolding +from dps.config import RooUnfold +from 
dps.utils.file_utilities import write_data_to_JSON import multiprocessing def check_multiple_data_multiple_unfolding(input_file, method, channel): diff --git a/legacy/FILES.py b/dps/legacy/FILES.py similarity index 100% rename from legacy/FILES.py rename to dps/legacy/FILES.py diff --git a/legacy/HistGetter.py b/dps/legacy/HistGetter.py similarity index 100% rename from legacy/HistGetter.py rename to dps/legacy/HistGetter.py diff --git a/legacy/HistPlotter.py b/dps/legacy/HistPlotter.py similarity index 100% rename from legacy/HistPlotter.py rename to dps/legacy/HistPlotter.py diff --git a/legacy/PlotHelper.py b/dps/legacy/PlotHelper.py similarity index 100% rename from legacy/PlotHelper.py rename to dps/legacy/PlotHelper.py diff --git a/legacy/QCDEstimation.py b/dps/legacy/QCDEstimation.py similarity index 100% rename from legacy/QCDEstimation.py rename to dps/legacy/QCDEstimation.py diff --git a/legacy/QCDRateEstimation.py b/dps/legacy/QCDRateEstimation.py similarity index 99% rename from legacy/QCDRateEstimation.py rename to dps/legacy/QCDRateEstimation.py index bf65db73..c4aac2a2 100644 --- a/legacy/QCDRateEstimation.py +++ b/dps/legacy/QCDRateEstimation.py @@ -14,8 +14,8 @@ ''' from __future__ import division from math import sqrt -import tools.ROOTFileReader as FileReader -import tools.PlottingUtilities as plotting +import dps.utils.ROOTFileReader as FileReader +import dps.utils.PlottingUtilities as plotting import FILES try: from uncertainties import ufloat diff --git a/legacy/QCDShapeExtraction.py b/dps/legacy/QCDShapeExtraction.py similarity index 100% rename from legacy/QCDShapeExtraction.py rename to dps/legacy/QCDShapeExtraction.py diff --git a/legacy/StoRMStorageDump.py b/dps/legacy/StoRMStorageDump.py similarity index 100% rename from legacy/StoRMStorageDump.py rename to dps/legacy/StoRMStorageDump.py diff --git a/src/unfolding_tests/__init__.py b/dps/legacy/__init__.py similarity index 100% rename from src/unfolding_tests/__init__.py rename to 
dps/legacy/__init__.py diff --git a/legacy/animate_gaus.sh b/dps/legacy/animate_gaus.sh similarity index 100% rename from legacy/animate_gaus.sh rename to dps/legacy/animate_gaus.sh diff --git a/legacy/bTagPlots.py b/dps/legacy/bTagPlots.py similarity index 100% rename from legacy/bTagPlots.py rename to dps/legacy/bTagPlots.py diff --git a/legacy/calculateRatio.py b/dps/legacy/calculateRatio.py similarity index 100% rename from legacy/calculateRatio.py rename to dps/legacy/calculateRatio.py diff --git a/legacy/combineResults.py b/dps/legacy/combineResults.py similarity index 100% rename from legacy/combineResults.py rename to dps/legacy/combineResults.py diff --git a/legacy/compareTriggersForQCDControlRegion.py b/dps/legacy/compareTriggersForQCDControlRegion.py similarity index 98% rename from legacy/compareTriggersForQCDControlRegion.py rename to dps/legacy/compareTriggersForQCDControlRegion.py index 40def1f4..a939406b 100644 --- a/legacy/compareTriggersForQCDControlRegion.py +++ b/dps/legacy/compareTriggersForQCDControlRegion.py @@ -4,8 +4,8 @@ #Step 4: Profit import FILES -import tools.ROOTFileReader as FileReader -import tools.PlottingUtilities as plotting +import dps.utils.ROOTFileReader as FileReader +import dps.utils.PlottingUtilities as plotting from ROOT import * gROOT.SetBatch(True) gROOT.ProcessLine('gErrorIgnoreLevel = 5001;') diff --git a/src/unfolding_tests/tests/__init__.py b/dps/legacy/config/__init__.py similarity index 100% rename from src/unfolding_tests/tests/__init__.py rename to dps/legacy/config/__init__.py diff --git a/legacy/config/sampleSummations.py b/dps/legacy/config/sampleSummations.py similarity index 100% rename from legacy/config/sampleSummations.py rename to dps/legacy/config/sampleSummations.py diff --git a/legacy/copyGridFolder b/dps/legacy/copyGridFolder similarity index 100% rename from legacy/copyGridFolder rename to dps/legacy/copyGridFolder diff --git a/legacy/copyNTuples.py b/dps/legacy/copyNTuples.py similarity index 100% 
rename from legacy/copyNTuples.py rename to dps/legacy/copyNTuples.py diff --git a/legacy/createPUDistribution.py b/dps/legacy/createPUDistribution.py similarity index 100% rename from legacy/createPUDistribution.py rename to dps/legacy/createPUDistribution.py diff --git a/legacy/createSumsOfHistograms.py b/dps/legacy/createSumsOfHistograms.py similarity index 99% rename from legacy/createSumsOfHistograms.py rename to dps/legacy/createSumsOfHistograms.py index 865d172f..aec2bf98 100644 --- a/legacy/createSumsOfHistograms.py +++ b/dps/legacy/createSumsOfHistograms.py @@ -5,7 +5,7 @@ import FILES from ROOT import TFile from rootpy.io import File -import tools.ROOTFileReader as fileReader +import dps.utils.ROOTFileReader as fileReader from optparse import OptionParser btag_bins_available = ['0btag', '1btag', '2btags', '3btags', '4orMoreBtags' diff --git a/legacy/cutFlow.C b/dps/legacy/cutFlow.C similarity index 100% rename from legacy/cutFlow.C rename to dps/legacy/cutFlow.C diff --git a/legacy/cutFlow_C.d b/dps/legacy/cutFlow_C.d similarity index 100% rename from legacy/cutFlow_C.d rename to dps/legacy/cutFlow_C.d diff --git a/legacy/data/QCD_data_mu.root b/dps/legacy/data/QCD_data_mu.root similarity index 100% rename from legacy/data/QCD_data_mu.root rename to dps/legacy/data/QCD_data_mu.root diff --git a/src/zprime_analysis/__init__.py b/dps/legacy/data/__init__.py similarity index 100% rename from src/zprime_analysis/__init__.py rename to dps/legacy/data/__init__.py diff --git a/legacy/data/acceptanceFactors/acceptanceFactors_EPlusJets_2orMoreBtags_JSON.txt b/dps/legacy/data/acceptanceFactors/acceptanceFactors_EPlusJets_2orMoreBtags_JSON.txt similarity index 100% rename from legacy/data/acceptanceFactors/acceptanceFactors_EPlusJets_2orMoreBtags_JSON.txt rename to dps/legacy/data/acceptanceFactors/acceptanceFactors_EPlusJets_2orMoreBtags_JSON.txt diff --git a/legacy/data/acceptanceFactors/acceptanceFactors_MuPlusJets_2orMoreBtags_JSON.txt 
b/dps/legacy/data/acceptanceFactors/acceptanceFactors_MuPlusJets_2orMoreBtags_JSON.txt similarity index 100% rename from legacy/data/acceptanceFactors/acceptanceFactors_MuPlusJets_2orMoreBtags_JSON.txt rename to dps/legacy/data/acceptanceFactors/acceptanceFactors_MuPlusJets_2orMoreBtags_JSON.txt diff --git a/legacy/data/contaminationFactors/contaminationFactors_EPlusJets_2orMoreBtags_JSON.txt b/dps/legacy/data/contaminationFactors/contaminationFactors_EPlusJets_2orMoreBtags_JSON.txt similarity index 100% rename from legacy/data/contaminationFactors/contaminationFactors_EPlusJets_2orMoreBtags_JSON.txt rename to dps/legacy/data/contaminationFactors/contaminationFactors_EPlusJets_2orMoreBtags_JSON.txt diff --git a/legacy/data/contaminationFactors/contaminationFactors_MuPlusJets_2orMoreBtags_JSON.txt b/dps/legacy/data/contaminationFactors/contaminationFactors_MuPlusJets_2orMoreBtags_JSON.txt similarity index 100% rename from legacy/data/contaminationFactors/contaminationFactors_MuPlusJets_2orMoreBtags_JSON.txt rename to dps/legacy/data/contaminationFactors/contaminationFactors_MuPlusJets_2orMoreBtags_JSON.txt diff --git a/legacy/data/correctionFactors/correctionFactors_EPlusJets_0btag_JSON.txt b/dps/legacy/data/correctionFactors/correctionFactors_EPlusJets_0btag_JSON.txt similarity index 100% rename from legacy/data/correctionFactors/correctionFactors_EPlusJets_0btag_JSON.txt rename to dps/legacy/data/correctionFactors/correctionFactors_EPlusJets_0btag_JSON.txt diff --git a/legacy/data/correctionFactors/correctionFactors_EPlusJets_0orMoreBtag_JSON.txt b/dps/legacy/data/correctionFactors/correctionFactors_EPlusJets_0orMoreBtag_JSON.txt similarity index 100% rename from legacy/data/correctionFactors/correctionFactors_EPlusJets_0orMoreBtag_JSON.txt rename to dps/legacy/data/correctionFactors/correctionFactors_EPlusJets_0orMoreBtag_JSON.txt diff --git a/legacy/data/correctionFactors/correctionFactors_EPlusJets_1btag_JSON.txt 
b/dps/legacy/data/correctionFactors/correctionFactors_EPlusJets_1btag_JSON.txt similarity index 100% rename from legacy/data/correctionFactors/correctionFactors_EPlusJets_1btag_JSON.txt rename to dps/legacy/data/correctionFactors/correctionFactors_EPlusJets_1btag_JSON.txt diff --git a/legacy/data/correctionFactors/correctionFactors_EPlusJets_1orMoreBtag_JSON.txt b/dps/legacy/data/correctionFactors/correctionFactors_EPlusJets_1orMoreBtag_JSON.txt similarity index 100% rename from legacy/data/correctionFactors/correctionFactors_EPlusJets_1orMoreBtag_JSON.txt rename to dps/legacy/data/correctionFactors/correctionFactors_EPlusJets_1orMoreBtag_JSON.txt diff --git a/legacy/data/correctionFactors/correctionFactors_EPlusJets_2orMoreBtags_JSON.txt b/dps/legacy/data/correctionFactors/correctionFactors_EPlusJets_2orMoreBtags_JSON.txt similarity index 100% rename from legacy/data/correctionFactors/correctionFactors_EPlusJets_2orMoreBtags_JSON.txt rename to dps/legacy/data/correctionFactors/correctionFactors_EPlusJets_2orMoreBtags_JSON.txt diff --git a/legacy/data/correctionFactors/correctionFactors_MuPlusJets_0btag_JSON.txt b/dps/legacy/data/correctionFactors/correctionFactors_MuPlusJets_0btag_JSON.txt similarity index 100% rename from legacy/data/correctionFactors/correctionFactors_MuPlusJets_0btag_JSON.txt rename to dps/legacy/data/correctionFactors/correctionFactors_MuPlusJets_0btag_JSON.txt diff --git a/legacy/data/correctionFactors/correctionFactors_MuPlusJets_0orMoreBtag_JSON.txt b/dps/legacy/data/correctionFactors/correctionFactors_MuPlusJets_0orMoreBtag_JSON.txt similarity index 100% rename from legacy/data/correctionFactors/correctionFactors_MuPlusJets_0orMoreBtag_JSON.txt rename to dps/legacy/data/correctionFactors/correctionFactors_MuPlusJets_0orMoreBtag_JSON.txt diff --git a/legacy/data/correctionFactors/correctionFactors_MuPlusJets_1btag_JSON.txt b/dps/legacy/data/correctionFactors/correctionFactors_MuPlusJets_1btag_JSON.txt similarity index 100% rename from 
legacy/data/correctionFactors/correctionFactors_MuPlusJets_1btag_JSON.txt rename to dps/legacy/data/correctionFactors/correctionFactors_MuPlusJets_1btag_JSON.txt diff --git a/legacy/data/correctionFactors/correctionFactors_MuPlusJets_1orMoreBtag_JSON.txt b/dps/legacy/data/correctionFactors/correctionFactors_MuPlusJets_1orMoreBtag_JSON.txt similarity index 100% rename from legacy/data/correctionFactors/correctionFactors_MuPlusJets_1orMoreBtag_JSON.txt rename to dps/legacy/data/correctionFactors/correctionFactors_MuPlusJets_1orMoreBtag_JSON.txt diff --git a/legacy/data/correctionFactors/correctionFactors_MuPlusJets_2orMoreBtags_JSON.txt b/dps/legacy/data/correctionFactors/correctionFactors_MuPlusJets_2orMoreBtags_JSON.txt similarity index 100% rename from legacy/data/correctionFactors/correctionFactors_MuPlusJets_2orMoreBtags_JSON.txt rename to dps/legacy/data/correctionFactors/correctionFactors_MuPlusJets_2orMoreBtags_JSON.txt diff --git a/legacy/data/etaAbs_ge2j_tight.root b/dps/legacy/data/etaAbs_ge2j_tight.root similarity index 100% rename from legacy/data/etaAbs_ge2j_tight.root rename to dps/legacy/data/etaAbs_ge2j_tight.root diff --git a/legacy/deleteGridFolder b/dps/legacy/deleteGridFolder similarity index 100% rename from legacy/deleteGridFolder rename to dps/legacy/deleteGridFolder diff --git a/legacy/delete_gpfs_dir.py b/dps/legacy/delete_gpfs_dir.py similarity index 100% rename from legacy/delete_gpfs_dir.py rename to dps/legacy/delete_gpfs_dir.py diff --git a/tools/tests/__init__.py b/dps/legacy/dev/__init__.py similarity index 100% rename from tools/tests/__init__.py rename to dps/legacy/dev/__init__.py diff --git a/legacy/dev/roofittest.py b/dps/legacy/dev/roofittest.py similarity index 100% rename from legacy/dev/roofittest.py rename to dps/legacy/dev/roofittest.py diff --git a/legacy/dev/roofittest2.py b/dps/legacy/dev/roofittest2.py similarity index 99% rename from legacy/dev/roofittest2.py rename to dps/legacy/dev/roofittest2.py index 
c4489972..c271f7fd 100644 --- a/legacy/dev/roofittest2.py +++ b/dps/legacy/dev/roofittest2.py @@ -7,7 +7,7 @@ ''' from ROOT import * from ROOT import RooRealVar, RooDataHist, RooArgList, RooHistPdf, RooArgSet, RooAddPdf -import tools.ROOTFileReader as FileReader +import dps.utils.ROOTFileReader as FileReader import FILES from math import sqrt h_m3_data = FileReader.getHistogramFromFile("TTbarPlusMetAnalysis/EPlusJets/Ref selection/BinnedMETAnalysis/Electron_PFMET_bin_0-25/electron_AbsEta_2orMoreBtags", FILES.files['ElectronHad']) diff --git a/legacy/do2DMET.C b/dps/legacy/do2DMET.C similarity index 100% rename from legacy/do2DMET.C rename to dps/legacy/do2DMET.C diff --git a/legacy/do2DMET_C.d b/dps/legacy/do2DMET_C.d similarity index 100% rename from legacy/do2DMET_C.d rename to dps/legacy/do2DMET_C.d diff --git a/legacy/do2DPlots.C b/dps/legacy/do2DPlots.C similarity index 100% rename from legacy/do2DPlots.C rename to dps/legacy/do2DPlots.C diff --git a/legacy/do2DPlots_C.d b/dps/legacy/do2DPlots_C.d similarity index 100% rename from legacy/do2DPlots_C.d rename to dps/legacy/do2DPlots_C.d diff --git a/legacy/eventPicker.py b/dps/legacy/eventPicker.py similarity index 100% rename from legacy/eventPicker.py rename to dps/legacy/eventPicker.py diff --git a/legacy/fileInfo.py b/dps/legacy/fileInfo.py similarity index 100% rename from legacy/fileInfo.py rename to dps/legacy/fileInfo.py diff --git a/legacy/fit_data_Njet.py b/dps/legacy/fit_data_Njet.py similarity index 100% rename from legacy/fit_data_Njet.py rename to dps/legacy/fit_data_Njet.py diff --git a/legacy/forLuke/PFhistosForFitting_central.root b/dps/legacy/forLuke/PFhistosForFitting_central.root similarity index 100% rename from legacy/forLuke/PFhistosForFitting_central.root rename to dps/legacy/forLuke/PFhistosForFitting_central.root diff --git a/legacy/forLuke/PFhistosForFitting_met1_central.root b/dps/legacy/forLuke/PFhistosForFitting_met1_central.root similarity index 100% rename from 
legacy/forLuke/PFhistosForFitting_met1_central.root rename to dps/legacy/forLuke/PFhistosForFitting_met1_central.root diff --git a/legacy/forLuke/PFhistosForFitting_met2_central.root b/dps/legacy/forLuke/PFhistosForFitting_met2_central.root similarity index 100% rename from legacy/forLuke/PFhistosForFitting_met2_central.root rename to dps/legacy/forLuke/PFhistosForFitting_met2_central.root diff --git a/legacy/forLuke/PFhistosForFitting_met3_central.root b/dps/legacy/forLuke/PFhistosForFitting_met3_central.root similarity index 100% rename from legacy/forLuke/PFhistosForFitting_met3_central.root rename to dps/legacy/forLuke/PFhistosForFitting_met3_central.root diff --git a/legacy/forLuke/PFhistosForFitting_met4_central.root b/dps/legacy/forLuke/PFhistosForFitting_met4_central.root similarity index 100% rename from legacy/forLuke/PFhistosForFitting_met4_central.root rename to dps/legacy/forLuke/PFhistosForFitting_met4_central.root diff --git a/legacy/forLuke/PFhistosForFitting_met5_central.root b/dps/legacy/forLuke/PFhistosForFitting_met5_central.root similarity index 100% rename from legacy/forLuke/PFhistosForFitting_met5_central.root rename to dps/legacy/forLuke/PFhistosForFitting_met5_central.root diff --git a/legacy/forLuke/PFhistosForFitting_metall_central.root b/dps/legacy/forLuke/PFhistosForFitting_metall_central.root similarity index 100% rename from legacy/forLuke/PFhistosForFitting_metall_central.root rename to dps/legacy/forLuke/PFhistosForFitting_metall_central.root diff --git a/legacy/forLuke/QCDetaData.root b/dps/legacy/forLuke/QCDetaData.root similarity index 100% rename from legacy/forLuke/QCDetaData.root rename to dps/legacy/forLuke/QCDetaData.root diff --git a/legacy/forLuke/TempFitEta.cc b/dps/legacy/forLuke/TempFitEta.cc similarity index 100% rename from legacy/forLuke/TempFitEta.cc rename to dps/legacy/forLuke/TempFitEta.cc diff --git a/legacy/forLuke/diffTempFit.cc b/dps/legacy/forLuke/diffTempFit.cc similarity index 100% rename from 
legacy/forLuke/diffTempFit.cc rename to dps/legacy/forLuke/diffTempFit.cc diff --git a/legacy/forLuke/pseudoExpsData.C b/dps/legacy/forLuke/pseudoExpsData.C similarity index 100% rename from legacy/forLuke/pseudoExpsData.C rename to dps/legacy/forLuke/pseudoExpsData.C diff --git a/legacy/forLuke/systematicFits.cc b/dps/legacy/forLuke/systematicFits.cc similarity index 100% rename from legacy/forLuke/systematicFits.cc rename to dps/legacy/forLuke/systematicFits.cc diff --git a/legacy/forLuke/systematicFits.py b/dps/legacy/forLuke/systematicFits.py similarity index 100% rename from legacy/forLuke/systematicFits.py rename to dps/legacy/forLuke/systematicFits.py diff --git a/legacy/forLuke/systematicFitsExp.cc b/dps/legacy/forLuke/systematicFitsExp.cc similarity index 100% rename from legacy/forLuke/systematicFitsExp.cc rename to dps/legacy/forLuke/systematicFitsExp.cc diff --git a/legacy/forLuke/systematicFits_cc.d b/dps/legacy/forLuke/systematicFits_cc.d similarity index 100% rename from legacy/forLuke/systematicFits_cc.d rename to dps/legacy/forLuke/systematicFits_cc.d diff --git a/legacy/forLuke/tdrStyle.py b/dps/legacy/forLuke/tdrStyle.py similarity index 100% rename from legacy/forLuke/tdrStyle.py rename to dps/legacy/forLuke/tdrStyle.py diff --git a/legacy/forLuke/tdrstyle.C b/dps/legacy/forLuke/tdrstyle.C similarity index 100% rename from legacy/forLuke/tdrstyle.C rename to dps/legacy/forLuke/tdrstyle.C diff --git a/legacy/getFileSize.py b/dps/legacy/getFileSize.py similarity index 100% rename from legacy/getFileSize.py rename to dps/legacy/getFileSize.py diff --git a/legacy/getHLTSuggestion.py b/dps/legacy/getHLTSuggestion.py similarity index 100% rename from legacy/getHLTSuggestion.py rename to dps/legacy/getHLTSuggestion.py diff --git a/legacy/getSkimEfficiency.py b/dps/legacy/getSkimEfficiency.py similarity index 100% rename from legacy/getSkimEfficiency.py rename to dps/legacy/getSkimEfficiency.py diff --git a/legacy/getTriggerReport.py 
b/dps/legacy/getTriggerReport.py similarity index 100% rename from legacy/getTriggerReport.py rename to dps/legacy/getTriggerReport.py diff --git a/legacy/inputFiles.py b/dps/legacy/inputFiles.py similarity index 100% rename from legacy/inputFiles.py rename to dps/legacy/inputFiles.py diff --git a/legacy/log.terr b/dps/legacy/log.terr similarity index 100% rename from legacy/log.terr rename to dps/legacy/log.terr diff --git a/legacy/makeCutFlow.py b/dps/legacy/makeCutFlow.py similarity index 100% rename from legacy/makeCutFlow.py rename to dps/legacy/makeCutFlow.py diff --git a/legacy/makeDiffVariablesPlots.py b/dps/legacy/makeDiffVariablesPlots.py similarity index 98% rename from legacy/makeDiffVariablesPlots.py rename to dps/legacy/makeDiffVariablesPlots.py index 8f022a21..797965fe 100644 --- a/legacy/makeDiffVariablesPlots.py +++ b/dps/legacy/makeDiffVariablesPlots.py @@ -1,4 +1,4 @@ -import tools.PlottingUtilities as plotting +import dps.utils.PlottingUtilities as plotting from ROOT import * import sys diff --git a/legacy/makeHLTPlots.py b/dps/legacy/makeHLTPlots.py similarity index 100% rename from legacy/makeHLTPlots.py rename to dps/legacy/makeHLTPlots.py diff --git a/legacy/makeMETplots.py b/dps/legacy/makeMETplots.py similarity index 98% rename from legacy/makeMETplots.py rename to dps/legacy/makeMETplots.py index d3ce216e..450ac975 100644 --- a/legacy/makeMETplots.py +++ b/dps/legacy/makeMETplots.py @@ -1,4 +1,4 @@ -import tools.PlottingUtilities as plotting +import dps.utils.PlottingUtilities as plotting from ROOT import * import sys diff --git a/legacy/makePlots.py b/dps/legacy/makePlots.py similarity index 100% rename from legacy/makePlots.py rename to dps/legacy/makePlots.py diff --git a/legacy/makePrettyPlots.py b/dps/legacy/makePrettyPlots.py similarity index 96% rename from legacy/makePrettyPlots.py rename to dps/legacy/makePrettyPlots.py index 92a44c64..befe3bd1 100644 --- a/legacy/makePrettyPlots.py +++ b/dps/legacy/makePrettyPlots.py @@ -13,7 
+13,7 @@ ''' -import tools.PlottingUtilities as plotting +import dps.utils.PlottingUtilities as plotting import FILES import ROOTFileReader as reader import QCDRateEstimation diff --git a/legacy/makeShinyPlots.py b/dps/legacy/makeShinyPlots.py similarity index 100% rename from legacy/makeShinyPlots.py rename to dps/legacy/makeShinyPlots.py diff --git a/legacy/mcNTuple.txt b/dps/legacy/mcNTuple.txt similarity index 100% rename from legacy/mcNTuple.txt rename to dps/legacy/mcNTuple.txt diff --git a/legacy/measureCrossSection.py b/dps/legacy/measureCrossSection.py similarity index 99% rename from legacy/measureCrossSection.py rename to dps/legacy/measureCrossSection.py index 5b6f3d98..1875f211 100644 --- a/legacy/measureCrossSection.py +++ b/dps/legacy/measureCrossSection.py @@ -6,19 +6,19 @@ from ROOT import RooRealVar, RooDataHist, RooArgList, RooHistPdf, RooArgSet, RooAddPdf, RooMCStudy, RooFit, RooMsgService import ROOT import FILES -import tools.ROOTFileReader as FileReader +import dps.utils.ROOTFileReader as FileReader from array import array -import tools.PlottingUtilities as plotting -import tools.FileUtilities as fileutils +from dps.utils import plotting +from dps.utils import file_utilities #import QCDRateEstimation from copy import deepcopy import numpy -from tools.Timer import Timer +from dps.utils.Timer import Timer import QCDRateEstimation from optparse import OptionParser -from tools.ColorPrinter import colorstr +from dps.utils.ColorPrinter import colorstr import json -from config.sampleSummations import qcd_samples, muon_qcd_samples, singleTop_samples, wplusjets_samples, zplusjets_samples, allMC_samples, signal_samples, vplusjets_samples +from dps.config.sampleSummations import qcd_samples, muon_qcd_samples, singleTop_samples, wplusjets_samples, zplusjets_samples, allMC_samples, signal_samples, vplusjets_samples from sets import Set import sys correctionFactors = None diff --git a/legacy/mergeROOTFilesWithCompression.py 
b/dps/legacy/mergeROOTFilesWithCompression.py similarity index 100% rename from legacy/mergeROOTFilesWithCompression.py rename to dps/legacy/mergeROOTFilesWithCompression.py diff --git a/legacy/nTupleInfo.py b/dps/legacy/nTupleInfo.py similarity index 100% rename from legacy/nTupleInfo.py rename to dps/legacy/nTupleInfo.py diff --git a/legacy/plotMttbar.py b/dps/legacy/plotMttbar.py similarity index 100% rename from legacy/plotMttbar.py rename to dps/legacy/plotMttbar.py diff --git a/legacy/plotQCDEstimate.py b/dps/legacy/plotQCDEstimate.py similarity index 100% rename from legacy/plotQCDEstimate.py rename to dps/legacy/plotQCDEstimate.py diff --git a/legacy/plots/allplots.py b/dps/legacy/plots/allplots.py similarity index 100% rename from legacy/plots/allplots.py rename to dps/legacy/plots/allplots.py diff --git a/legacy/plots/allplots_multi.py b/dps/legacy/plots/allplots_multi.py similarity index 100% rename from legacy/plots/allplots_multi.py rename to dps/legacy/plots/allplots_multi.py diff --git a/legacy/plots/index.html b/dps/legacy/plots/index.html similarity index 100% rename from legacy/plots/index.html rename to dps/legacy/plots/index.html diff --git a/legacy/prescaleTableParser.py b/dps/legacy/prescaleTableParser.py similarity index 100% rename from legacy/prescaleTableParser.py rename to dps/legacy/prescaleTableParser.py diff --git a/legacy/printCutFlow.py b/dps/legacy/printCutFlow.py similarity index 98% rename from legacy/printCutFlow.py rename to dps/legacy/printCutFlow.py index 56466fcd..dca043f2 100644 --- a/legacy/printCutFlow.py +++ b/dps/legacy/printCutFlow.py @@ -14,8 +14,8 @@ from __future__ import division from ROOT import * -import tools.ROOTFileReader as FileReader -import tools.PlottingUtilities as plotting +import dps.utils.ROOTFileReader as FileReader +import dps.utils.PlottingUtilities as plotting import FILES from math import sqrt import QCDRateEstimation diff --git a/legacy/printNTupleContent.py b/dps/legacy/printNTupleContent.py 
similarity index 100% rename from legacy/printNTupleContent.py rename to dps/legacy/printNTupleContent.py diff --git a/legacy/purityAndStability_METbins.py b/dps/legacy/purityAndStability_METbins.py similarity index 98% rename from legacy/purityAndStability_METbins.py rename to dps/legacy/purityAndStability_METbins.py index 756b8150..2307e29c 100644 --- a/legacy/purityAndStability_METbins.py +++ b/dps/legacy/purityAndStability_METbins.py @@ -7,9 +7,9 @@ ''' import FILES -import tools.ROOTFileReader as FileReader +import dps.utils.ROOTFileReader as FileReader from ROOT import gROOT -import tools.FileUtilities as FileUtils +import dps.utils.FileUtilities as FileUtils fileTemplate = 'data/correctionFactors/correctionFactors_%s_%s_JSON.txt' samples = [ diff --git a/legacy/purityAndStability_METbins_V0.py b/dps/legacy/purityAndStability_METbins_V0.py similarity index 99% rename from legacy/purityAndStability_METbins_V0.py rename to dps/legacy/purityAndStability_METbins_V0.py index d74d3b56..febc85a3 100644 --- a/legacy/purityAndStability_METbins_V0.py +++ b/dps/legacy/purityAndStability_METbins_V0.py @@ -7,7 +7,7 @@ ''' import FILES -import tools.ROOTFileReader as FileReader +import dps.utils.ROOTFileReader as FileReader from ROOT import gROOT fileTemplate = 'data/correctionFactors/correctionFactors_%s_%s_JSON.txt' diff --git a/legacy/purityAndStability_METbins_old.py b/dps/legacy/purityAndStability_METbins_old.py similarity index 98% rename from legacy/purityAndStability_METbins_old.py rename to dps/legacy/purityAndStability_METbins_old.py index 1beae0c1..f064e3dc 100644 --- a/legacy/purityAndStability_METbins_old.py +++ b/dps/legacy/purityAndStability_METbins_old.py @@ -5,9 +5,9 @@ Email: Lukasz.Kreczko@cern.ch ''' -from tools import Styles +from dps.utils import Styles import FILES -import tools.ROOTFileReader as FileReader +import dps.utils.ROOTFileReader as FileReader from ROOT import * #import HistGetter diff --git a/legacy/readMergeLog.py 
b/dps/legacy/readMergeLog.py similarity index 100% rename from legacy/readMergeLog.py rename to dps/legacy/readMergeLog.py diff --git a/legacy/remove_duplicates b/dps/legacy/remove_duplicates similarity index 100% rename from legacy/remove_duplicates rename to dps/legacy/remove_duplicates diff --git a/legacy/remove_duplicates.py b/dps/legacy/remove_duplicates.py similarity index 100% rename from legacy/remove_duplicates.py rename to dps/legacy/remove_duplicates.py diff --git a/legacy/roofittest2.py b/dps/legacy/roofittest2.py similarity index 99% rename from legacy/roofittest2.py rename to dps/legacy/roofittest2.py index 9041e0a8..2e1e41f4 100644 --- a/legacy/roofittest2.py +++ b/dps/legacy/roofittest2.py @@ -7,7 +7,7 @@ ''' #from ROOT import * from ROOT import RooRealVar, RooDataHist, RooArgList, RooHistPdf, RooArgSet, RooAddPdf, RooMCStudy, RooFit -import tools.ROOTFileReader as FileReader +import dps.utils.ROOTFileReader as FileReader import FILES from math import sqrt diff --git a/legacy/rootplot_config.py b/dps/legacy/rootplot_config.py similarity index 100% rename from legacy/rootplot_config.py rename to dps/legacy/rootplot_config.py diff --git a/legacy/rootplotmpl_config.py b/dps/legacy/rootplotmpl_config.py similarity index 100% rename from legacy/rootplotmpl_config.py rename to dps/legacy/rootplotmpl_config.py diff --git a/legacy/rootplottest.py b/dps/legacy/rootplottest.py similarity index 100% rename from legacy/rootplottest.py rename to dps/legacy/rootplottest.py diff --git a/legacy/run.sh b/dps/legacy/run.sh similarity index 100% rename from legacy/run.sh rename to dps/legacy/run.sh diff --git a/legacy/run_mc.sh b/dps/legacy/run_mc.sh similarity index 100% rename from legacy/run_mc.sh rename to dps/legacy/run_mc.sh diff --git a/legacy/startElectronAnalysis.sh b/dps/legacy/startElectronAnalysis.sh similarity index 100% rename from legacy/startElectronAnalysis.sh rename to dps/legacy/startElectronAnalysis.sh diff --git a/legacy/startMeasurement.sh 
b/dps/legacy/startMeasurement.sh similarity index 100% rename from legacy/startMeasurement.sh rename to dps/legacy/startMeasurement.sh diff --git a/legacy/tdrStyle.py b/dps/legacy/tdrStyle.py similarity index 100% rename from legacy/tdrStyle.py rename to dps/legacy/tdrStyle.py diff --git a/legacy/tdrstyle.C b/dps/legacy/tdrstyle.C similarity index 100% rename from legacy/tdrstyle.C rename to dps/legacy/tdrstyle.C diff --git a/legacy/tools/ColorPrinter.py b/dps/legacy/tools/ColorPrinter.py similarity index 100% rename from legacy/tools/ColorPrinter.py rename to dps/legacy/tools/ColorPrinter.py diff --git a/legacy/tools/CrossSectionMeasurement.py b/dps/legacy/tools/CrossSectionMeasurement.py similarity index 100% rename from legacy/tools/CrossSectionMeasurement.py rename to dps/legacy/tools/CrossSectionMeasurement.py diff --git a/legacy/tools/ErrorCalculation.py b/dps/legacy/tools/ErrorCalculation.py similarity index 100% rename from legacy/tools/ErrorCalculation.py rename to dps/legacy/tools/ErrorCalculation.py diff --git a/legacy/tools/ErrorSources.py b/dps/legacy/tools/ErrorSources.py similarity index 100% rename from legacy/tools/ErrorSources.py rename to dps/legacy/tools/ErrorSources.py diff --git a/legacy/tools/FileUtilities.py b/dps/legacy/tools/FileUtilities.py similarity index 100% rename from legacy/tools/FileUtilities.py rename to dps/legacy/tools/FileUtilities.py diff --git a/legacy/tools/Fitter.py b/dps/legacy/tools/Fitter.py similarity index 100% rename from legacy/tools/Fitter.py rename to dps/legacy/tools/Fitter.py diff --git a/legacy/tools/Log.py b/dps/legacy/tools/Log.py similarity index 100% rename from legacy/tools/Log.py rename to dps/legacy/tools/Log.py diff --git a/legacy/tools/NTuples.py b/dps/legacy/tools/NTuples.py similarity index 100% rename from legacy/tools/NTuples.py rename to dps/legacy/tools/NTuples.py diff --git a/legacy/tools/PlottingUtilities.py b/dps/legacy/tools/PlottingUtilities.py similarity index 100% rename from 
legacy/tools/PlottingUtilities.py rename to dps/legacy/tools/PlottingUtilities.py diff --git a/legacy/tools/Printer.py b/dps/legacy/tools/Printer.py similarity index 100% rename from legacy/tools/Printer.py rename to dps/legacy/tools/Printer.py diff --git a/legacy/tools/ROOTFileReader.py b/dps/legacy/tools/ROOTFileReader.py similarity index 97% rename from legacy/tools/ROOTFileReader.py rename to dps/legacy/tools/ROOTFileReader.py index 2c6560f2..1d6ce918 100644 --- a/legacy/tools/ROOTFileReader.py +++ b/dps/legacy/tools/ROOTFileReader.py @@ -1,6 +1,6 @@ from ROOT import TFile, gROOT -import tools.Log as Log -from config.sampleSummations import btag_bins_inclusive, btag_sums +import dps.utils.Log as Log +from dps.config.sampleSummations import btag_bins_inclusive, btag_sums openRootFile = TFile.Open gcd = gROOT.cd diff --git a/legacy/tools/ROOTFileWriter.py b/dps/legacy/tools/ROOTFileWriter.py similarity index 100% rename from legacy/tools/ROOTFileWriter.py rename to dps/legacy/tools/ROOTFileWriter.py diff --git a/legacy/tools/Styles.py b/dps/legacy/tools/Styles.py similarity index 100% rename from legacy/tools/Styles.py rename to dps/legacy/tools/Styles.py diff --git a/legacy/tools/Table.py b/dps/legacy/tools/Table.py similarity index 100% rename from legacy/tools/Table.py rename to dps/legacy/tools/Table.py diff --git a/legacy/tools/Timer.py b/dps/legacy/tools/Timer.py similarity index 100% rename from legacy/tools/Timer.py rename to dps/legacy/tools/Timer.py diff --git a/legacy/tools/Trigger.py b/dps/legacy/tools/Trigger.py similarity index 100% rename from legacy/tools/Trigger.py rename to dps/legacy/tools/Trigger.py diff --git a/legacy/tools/WPlusJetsEstimation.py b/dps/legacy/tools/WPlusJetsEstimation.py similarity index 100% rename from legacy/tools/WPlusJetsEstimation.py rename to dps/legacy/tools/WPlusJetsEstimation.py diff --git a/dps/legacy/tools/__init__.py b/dps/legacy/tools/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/legacy/unfolding_test.py b/dps/legacy/unfolding_test.py similarity index 97% rename from legacy/unfolding_test.py rename to dps/legacy/unfolding_test.py index 6c9b1c84..012b14e7 100644 --- a/legacy/unfolding_test.py +++ b/dps/legacy/unfolding_test.py @@ -6,10 +6,10 @@ import matplotlib.pyplot as plt from rootpy import asrootpy from array import array -from tools.Unfolding import Unfolding -import config.RooUnfold as unfoldCfg -from config import CMS, RooUnfold -from tools.ROOT_utililities import set_root_defaults +from dps.utils.Unfolding import Unfolding +import dps.config.RooUnfold as unfoldCfg +from dps.config import CMS, RooUnfold +from dps.utils.ROOT_utils import set_root_defaults def saveClosureTest(unfolding, outputfile, **kwargs): diff --git a/legacy/verifyNTupleProduction.py b/dps/legacy/verifyNTupleProduction.py similarity index 100% rename from legacy/verifyNTupleProduction.py rename to dps/legacy/verifyNTupleProduction.py diff --git a/legacy/workspace_electron_channel_2orMoreBtags.py b/dps/legacy/workspace_electron_channel_2orMoreBtags.py similarity index 100% rename from legacy/workspace_electron_channel_2orMoreBtags.py rename to dps/legacy/workspace_electron_channel_2orMoreBtags.py diff --git a/tools/Calculation.py b/dps/utils/Calculation.py similarity index 99% rename from tools/Calculation.py rename to dps/utils/Calculation.py index 1ce4f4d4..bd5eafc3 100644 --- a/tools/Calculation.py +++ b/dps/utils/Calculation.py @@ -5,9 +5,8 @@ ''' from __future__ import division from uncertainties import ufloat -import numpy from math import sqrt -from config.met_systematics import metsystematics_sources +from dps.config.met_systematics import metsystematics_sources from rootpy import asrootpy def calculate_xsection(inputs, luminosity, efficiency=1.): diff --git a/tools/Fitting.py b/dps/utils/Fitting.py similarity index 99% rename from tools/Fitting.py rename to dps/utils/Fitting.py index b736a5a9..331394b7 100644 --- a/tools/Fitting.py +++ 
b/dps/utils/Fitting.py @@ -11,7 +11,7 @@ from array import array import math import logging -from hist_utilities import adjust_overflow_to_limit +from .hist_utilities import adjust_overflow_to_limit import rootpy.stl as stl from copy import deepcopy # RooFit is really verbose. Make it stop @@ -425,7 +425,7 @@ def fit( self ): #WARNING: number of cores changes the results!!! self.saved_result = use_model.fitTo( roofit_histograms[self.data_label], - RooFit.Minimizer( "Minuit2", "Migrad" ), + RooFit.Minimizer( "Minuit", "Migrad" ), RooFit.NumCPU( 1 ), RooFit.Extended(), RooFit.Save(), diff --git a/tools/HistSet.py b/dps/utils/HistSet.py similarity index 96% rename from tools/HistSet.py rename to dps/utils/HistSet.py index d51c0a0d..fdbbafd6 100644 --- a/tools/HistSet.py +++ b/dps/utils/HistSet.py @@ -4,11 +4,11 @@ @author: kreczko ''' import sys -from tools.plotting import Histogram_properties, make_shape_comparison_plot,\ +from .plotting import Histogram_properties, make_shape_comparison_plot,\ make_data_mc_comparison_plot from ROOT_utils import get_histogram_from_file import types -from tools.hist_utilities import conditional_rebin +from .hist_utilities import conditional_rebin class HistSet(): ''' diff --git a/tools/NTuple.py b/dps/utils/NTuple.py similarity index 100% rename from tools/NTuple.py rename to dps/utils/NTuple.py diff --git a/tools/QCD_rate_estimation.py b/dps/utils/QCD_rate_estimation.py similarity index 100% rename from tools/QCD_rate_estimation.py rename to dps/utils/QCD_rate_estimation.py diff --git a/tools/ROOT_utils.py b/dps/utils/ROOT_utils.py similarity index 99% rename from tools/ROOT_utils.py rename to dps/utils/ROOT_utils.py index c26c42f2..c2f4b48e 100644 --- a/tools/ROOT_utils.py +++ b/dps/utils/ROOT_utils.py @@ -8,8 +8,8 @@ from ROOT import gROOT, TH1F from rootpy.plotting import Hist gcd = gROOT.cd -import config.summations_common as sumations -from tools.logger import log +import dps.config.summations_common as sumations +from .logger 
import log # define logger for this module root_utils_log = log["ROOT_Utils"] diff --git a/tools/Styles.py b/dps/utils/Styles.py similarity index 100% rename from tools/Styles.py rename to dps/utils/Styles.py diff --git a/tools/Timer.py b/dps/utils/Timer.py similarity index 100% rename from tools/Timer.py rename to dps/utils/Timer.py diff --git a/tools/Unfolding.py b/dps/utils/Unfolding.py similarity index 99% rename from tools/Unfolding.py rename to dps/utils/Unfolding.py index 70237fd3..dacb9986 100644 --- a/tools/Unfolding.py +++ b/dps/utils/Unfolding.py @@ -5,9 +5,9 @@ ''' from __future__ import division from ROOT import gSystem, cout, TDecompSVD -from tools.ROOT_utils import set_root_defaults +from .ROOT_utils import set_root_defaults set_root_defaults(set_batch=True, msg_ignore_level=3001) -from tools.hist_utilities import hist_to_value_error_tuplelist +from .hist_utilities import hist_to_value_error_tuplelist from ROOT import TUnfoldDensity, TUnfold from ROOT import TH2D, TH1D, TGraph from rootpy import asrootpy diff --git a/tools/__init__.py b/dps/utils/__init__.py similarity index 96% rename from tools/__init__.py rename to dps/utils/__init__.py index 60d0e4d0..f19f5926 100644 --- a/tools/__init__.py +++ b/dps/utils/__init__.py @@ -1,4 +1,4 @@ -from tools.logger import log +from .logger import log # define logger for this module tools_log = log["tools"] diff --git a/tools/das_client.py b/dps/utils/das_client.py similarity index 100% rename from tools/das_client.py rename to dps/utils/das_client.py diff --git a/tools/datapoint_position.py b/dps/utils/datapoint_position.py similarity index 96% rename from tools/datapoint_position.py rename to dps/utils/datapoint_position.py index 917d4a0f..66ec4dd7 100644 --- a/tools/datapoint_position.py +++ b/dps/utils/datapoint_position.py @@ -4,8 +4,7 @@ @author: kreczko ''' -from rootpy import asrootpy -from tools.hist_utilities import rebin_asymmetric +from .hist_utilities import rebin_asymmetric def 
get_bin_centers(bin_edges): centers = [] diff --git a/tools/file_utilities.py b/dps/utils/file_utilities.py similarity index 100% rename from tools/file_utilities.py rename to dps/utils/file_utilities.py diff --git a/tools/grid_utilities.py b/dps/utils/grid_utilities.py similarity index 100% rename from tools/grid_utilities.py rename to dps/utils/grid_utilities.py diff --git a/tools/hist_utilities.py b/dps/utils/hist_utilities.py similarity index 99% rename from tools/hist_utilities.py rename to dps/utils/hist_utilities.py index e2642d9b..03db4003 100644 --- a/tools/hist_utilities.py +++ b/dps/utils/hist_utilities.py @@ -13,8 +13,8 @@ import random import string from copy import deepcopy -from tools.file_utilities import read_data_from_JSON -from tools.logger import log +from .file_utilities import read_data_from_JSON +from .logger import log hu_log = log["tools/hist_utilities"] def hist_to_value_error_tuplelist( hist ): @@ -349,8 +349,8 @@ def get_fitted_normalisation( variable, channel, path_to_JSON, category, met_typ This function now gets the error on the fit correctly, so that it can be applied if the --normalise_to_fit option is used ''' - import config.variable_binning - variable_bins_ROOT = config.variable_binning.variable_bins_ROOT + from dps.config import variable_binning as cfg_binning + variable_bins_ROOT = cfg_binning.variable_bins_ROOT fit_results = read_data_from_JSON( path_to_JSON + variable + '/fit_results/' + category + '/fit_results_' + channel + '_' + met_type + '.txt' ) N_fit_ttjet = [0, 0] diff --git a/tools/input.py b/dps/utils/input.py similarity index 81% rename from tools/input.py rename to dps/utils/input.py index ce5fc7b2..ea748b85 100644 --- a/tools/input.py +++ b/dps/utils/input.py @@ -4,12 +4,13 @@ import os from rootpy.io import File -from tools import log -import tools.ROOT_utils -import tools.file_utilities +from . 
import log +from dps.utils.file_utilities import read_data_from_JSON, write_data_to_JSON +from dps.utils.ROOT_utils import get_histogram_from_file,\ + get_histogram_from_tree # define logger for this module -input_log = log["tools.input"] +input_log = log["dps.utils.input"] class Input(): @@ -70,24 +71,31 @@ def __init__(self, **kwargs): self.scale = kwargs.pop('scale') # store remaining parameters self.kwargs = kwargs + self.error = '' @input_log.trace() def isValid(self): # file has to exists if not os.path.exists(self.file): - input_log.debug('File does not exist: ' + self.file) + msg = 'File does not exist: ' + self.file + self.error = msg + input_log.debug(msg) return False if self.hist_name: with File.open(self.file) as f: if not f.__contains__(self.hist_name): msg = 'File "{0}" does not contain histogram "{1}"' - input_log.debug(msg.format(self.file, self.hist_name)) + msg = msg.format(self.file, self.hist_name) + self.error = msg + input_log.debug(msg) return False if self.tree_name: with File.open(self.file) as f: if not f.__contains__(self.tree_name): msg = 'File "{0}" does not contain tree "{1}"' - input_log.debug(msg.format(self.file, self.tree_name)) + msg = msg.format(self.file, self.tree_name) + self.error = msg + input_log.debug(msg) return False tree = f[self.tree_name] branchToCheck = self.branch @@ -97,21 +105,23 @@ def isValid(self): branchToCheck = branchToCheck.split('(')[-1].split(')')[0] if not tree.has_branch(branchToCheck): msg = 'Tree "{0}" does not contain branch "{1}"' - input_log.debug(msg.format(self.tree_name, branchToCheck)) + msg = msg.format(self.tree_name, branchToCheck) + self.error = msg + input_log.debug(msg) return False return True @input_log.trace() def read(self): if not self.isValid(): - raise ValueError('Inputs are not valid') + raise ValueError('Inputs are not valid {0}'.format(self.error)) if self.hist_name: - self.hist = tools.ROOT_utils.get_histogram_from_file( + self.hist = get_histogram_from_file( self.hist_name, 
self.file) if self.tree_name: - self.hist = tools.ROOT_utils.get_histogram_from_tree( + self.hist = get_histogram_from_tree( tree=self.tree_name, branch=self.branch, weight_branches=self.weight_branches, @@ -124,14 +134,14 @@ def read(self): @staticmethod def fromJSON(json_file): - src = tools.file_utilities.read_data_from_JSON(json_file) + src = read_data_from_JSON(json_file) i = Input(**src) return i @input_log.trace() def toJSON(self, json_file): d = self.toDict() - tools.file_utilities.write_data_to_JSON(d, json_file) + write_data_to_JSON(d, json_file) @input_log.trace() def toDict(self): diff --git a/tools/latex.py b/dps/utils/latex.py similarity index 98% rename from tools/latex.py rename to dps/utils/latex.py index d794f883..03660ab6 100644 --- a/tools/latex.py +++ b/dps/utils/latex.py @@ -5,7 +5,7 @@ ''' from matplotlib import rc, rcParams -from config import CMS +from dps.config import CMS import subprocess import os from distutils.spawn import find_executable diff --git a/tools/logger.py b/dps/utils/logger.py similarity index 100% rename from tools/logger.py rename to dps/utils/logger.py diff --git a/tools/measurement.py b/dps/utils/measurement.py similarity index 92% rename from tools/measurement.py rename to dps/utils/measurement.py index 97ec59a8..df463b3d 100644 --- a/tools/measurement.py +++ b/dps/utils/measurement.py @@ -2,15 +2,16 @@ Provides the classes Measurement and Systematic ''' from __future__ import division -from tools import log -import tools.ROOT_utils -import tools.file_utilities as fu -import tools.hist_utilities as hu -import tools.input as ti +from . 
import log import copy from rootpy.io.file import Directory +from dps.utils.ROOT_utils import get_histogram_from_file +from dps.utils.file_utilities import make_folder_if_not_exists,\ + write_data_to_JSON, read_data_from_JSON +from dps.utils.input import Input +from dps.utils.hist_utilities import clean_control_region # define logger for this module -meas_log = log["tools.measurement"] +meas_log = log["dps.utils.measurement"] class Measurement(): @@ -72,8 +73,8 @@ def toJSON(self, JSON_file): output = self.toDict() filename = JSON_file.split('/')[-1] directory = JSON_file.replace(filename, '') - fu.make_folder_if_not_exists(directory) - fu.write_data_to_JSON(output, JSON_file) + make_folder_if_not_exists(directory) + write_data_to_JSON(output, JSON_file) @meas_log.trace() def toDict(self): @@ -98,7 +99,7 @@ def toDict(self): @staticmethod def fromJSON(JSON_file): - src = fu.read_data_from_JSON(JSON_file) + src = read_data_from_JSON(JSON_file) m = Measurement.fromDict(src) return m @@ -106,9 +107,9 @@ def fromJSON(JSON_file): @staticmethod def fromDict(d): m = None - if d['class'] == 'tools.measurement.Measurement': + if d['class'] == 'dps.utils.measurement.Measurement': m = Measurement(d['name']) - if d['class'] == 'tools.measurement.Systematic': + if d['class'] == 'dps.utils.measurement.Systematic': m = Systematic(d['name'], d['type'], affected_samples=d['affected_samples'], scale=d['scale']) m.setVariable(d['variable']) @@ -117,7 +118,7 @@ def fromDict(d): m.setMETType(d['met_type']) for sample, i in d['samples'].items(): if i.has_key('input'): - inp = ti.Input(**i['input']) + inp = Input(**i['input']) m.addSample(sample, read=True, input=inp) else: m.addSample(sample, i['file'], i['hist'], read=True) @@ -162,7 +163,7 @@ def getCleanedShape(self, sample): subtract = copy.copy(self.histograms.keys()) subtract.remove(sample) subtract.remove('data') - hist = hu.clean_control_region(self.histograms, + hist = clean_control_region(self.histograms, data_label='data', 
subtract=subtract, fix_to_zero=True) @@ -187,13 +188,13 @@ def read_sample(self, sample): if self.samples[sample].has_key('input'): i = self.samples[sample]['input'] if isinstance(i, dict): - i = ti.Input(**self.samples[sample]['input']) + i = Input(**self.samples[sample]['input']) self.histograms[sample] = i.read() return input_file = self.samples[sample]['input_file'] if self.samples[sample].has_key('hist'): hist = self.samples[sample]['hist'] - self.histograms[sample] = tools.ROOT_utils.get_histogram_from_file( + self.histograms[sample] = get_histogram_from_file( hist, input_file) @meas_log.trace() diff --git a/tools/pandas_utilities.py b/dps/utils/pandas_utilities.py similarity index 100% rename from tools/pandas_utilities.py rename to dps/utils/pandas_utilities.py diff --git a/tools/plotting.py b/dps/utils/plotting.py similarity index 99% rename from tools/plotting.py rename to dps/utils/plotting.py index ba47471b..890b956f 100644 --- a/tools/plotting.py +++ b/dps/utils/plotting.py @@ -4,21 +4,21 @@ @author: kreczko ''' import matplotlib as mpl -from tools.file_utilities import make_folder_if_not_exists -from tools.file_utilities import saveHistogramsToROOTFile -from tools.hist_utilities import spread_x, graph_to_value_errors_tuplelist -from tools.hist_utilities import get_histogram_ratios +from .file_utilities import make_folder_if_not_exists +from .file_utilities import saveHistogramsToROOTFile +from .hist_utilities import spread_x, graph_to_value_errors_tuplelist +from .hist_utilities import get_histogram_ratios mpl.use('agg') import matplotlib.pyplot as plt import rootpy.plotting.root2matplotlib as rplt from rootpy.plotting import HistStack, Hist, Graph -from config import CMS +from dps.config import CMS from matplotlib.patches import Rectangle from copy import deepcopy import matplotlib.gridspec as gridspec from matplotlib.ticker import MultipleLocator, FixedLocator from itertools import cycle, combinations -from tools.latex import setup_matplotlib +from 
.latex import setup_matplotlib setup_matplotlib() diff --git a/tools/plotting_utilities.py b/dps/utils/plotting_utilities.py similarity index 98% rename from tools/plotting_utilities.py rename to dps/utils/plotting_utilities.py index 727d2cd9..6c102bbf 100644 --- a/tools/plotting_utilities.py +++ b/dps/utils/plotting_utilities.py @@ -7,7 +7,7 @@ ''' from ROOT import TLegend, TCanvas, TPaveText, gROOT import Styles -import file_utilities +from dps.utils.file_utilities import make_folder_if_not_exists defaultCanvasWidth = 1600 defaultCanvasHeight = 1200 @@ -99,7 +99,7 @@ def saveAs(canvas, name, outputFormats=['png'], outputFolder=''): fullFileName = outputFolder + name + '.' + outputFormat if '/' in fullFileName: path = fullFileName[:fullFileName.rfind('/')] - file_utilities.make_folder_if_not_exists(path) + make_folder_if_not_exists(path) canvas.SaveAs(fullFileName) diff --git a/tools/resolution.py b/dps/utils/resolution.py similarity index 100% rename from tools/resolution.py rename to dps/utils/resolution.py diff --git a/tools/systematic.py b/dps/utils/systematic.py similarity index 98% rename from tools/systematic.py rename to dps/utils/systematic.py index 9ffce305..99ce0890 100644 --- a/tools/systematic.py +++ b/dps/utils/systematic.py @@ -1,8 +1,7 @@ from __future__ import division, print_function -from tools.file_utilities import read_data_from_JSON, write_data_to_JSON, deprecated -from tools.Calculation import combine_errors_in_quadrature -from tools.pandas_utilities import dict_to_df, list_to_series, df_to_file, divide_by_series -from config import XSectionConfig +from dps.utils.file_utilities import read_data_from_JSON, write_data_to_JSON, deprecated +from dps.utils.Calculation import combine_errors_in_quadrature +from dps.utils.pandas_utilities import dict_to_df, list_to_series, df_to_file, divide_by_series from copy import deepcopy from math import sqrt import numpy as np @@ -511,7 +510,7 @@ def make_covariance_plot( options, systematic, matrix, 
label='Covariance' ): Take the matrix in list form and bin edges in list form to create a TH2F of the covariance matrix Saves to plots/covariance_matrices/{PhaseSpace}/{Channel}/{Variable}/ ''' - from config.variable_binning import bin_edges_vis + from dps.config.variable_binning import bin_edges_vis from ROOT import TH2F, TCanvas, TPad, gROOT, gStyle from array import array gROOT.SetBatch(True) diff --git a/tools/table.py b/dps/utils/table.py similarity index 97% rename from tools/table.py rename to dps/utils/table.py index b4b97c1a..26971c80 100644 --- a/tools/table.py +++ b/dps/utils/table.py @@ -4,8 +4,8 @@ @author: kreczko ''' from tabulate import tabulate -from tools.logger import log -mylog = log["tools.table"] +from .logger import log +mylog = log["dps.utils.table"] class PrintTable(): diff --git a/tools/toy_mc.py b/dps/utils/toy_mc.py similarity index 97% rename from tools/toy_mc.py rename to dps/utils/toy_mc.py index fc1bfd76..7bfacc4e 100644 --- a/tools/toy_mc.py +++ b/dps/utils/toy_mc.py @@ -5,9 +5,8 @@ ''' from __future__ import division from numpy.random import poisson -import numpy as np from math import sqrt -from hist_utilities import value_error_tuplelist_to_hist +from .hist_utilities import value_error_tuplelist_to_hist from numpy.ma.extras import average def generate_toy_MC_from_distribution( distribution ): diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..b747d9a4 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,8 @@ +rootpy +root-numpy +numpy +matplotlib +nose +sphinx +pytables +uncertainties \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..17d93d45 --- /dev/null +++ b/setup.py @@ -0,0 +1,78 @@ +# Always prefer setuptools over distutils +from setuptools import setup, find_packages + +# To use a consistent encoding +from codecs import open +import os +import re +import io +import glob + +here = os.path.abspath(os.path.dirname(__file__)) +scripts = 
glob.glob('bin/*') + +# Get the long description from the README file +with open(os.path.join(here, 'README.md'), encoding='utf-8') as f: + long_description = f.read() + + +def read(*names, **kwargs): + with io.open( + os.path.join(os.path.dirname(__file__), *names), + encoding=kwargs.get("encoding", "utf8") + ) as fp: + return fp.read() + + +def find_version(*file_paths): + version_file = read(*file_paths) + version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", + version_file, re.M) + if version_match: + return version_match.group(1) + raise RuntimeError("Unable to find version string.") + + +setup( + name='dps', + # Versions should comply with PEP440. For a discussion on single-sourcing + # the version across setup.py and the project code, see + # https://packaging.python.org/en/latest/single_source_version.html + version=find_version("dps", "__init__.py"), + description='Python scripts for the daily tasks in particle physics', + long_description=long_description, + + # The project's main homepage. + url='https://github.com/BristolTopGroup/DailyPythonScripts', + + author='D. Burns, E. Clement, J. Jacob, L. Kreczko, S. 
Senkin', + author_email='lkreczko@gmail.com', + + license='Apache 2.0', + + # See https://pypi.python.org/pypi?%3Aaction=list_classifiers + classifiers=[ + # 3 - Alpha + # 4 - Beta + # 5 - Production/Stable + 'Development Status :: 4 - Beta', + + # Indicate who your project is intended for + 'Intended Audience :: Developers', + 'Intended Audience :: Science/Research', + 'Topic :: Utilities', + + 'License :: OSI Approved :: Apache Software License', + + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + ], + packages=find_packages(exclude=['contrib', 'docs', 'tests']), + package_data={ + 'bin': ['bin/*'], + }, + scripts=scripts, +) diff --git a/src/cross_section_measurement/tests/test_pick_bins.py b/src/cross_section_measurement/tests/test_pick_bins.py deleted file mode 100644 index 10c7b971..00000000 --- a/src/cross_section_measurement/tests/test_pick_bins.py +++ /dev/null @@ -1,199 +0,0 @@ -''' -Created on 31 Oct 2012 - -@author: kreczko -''' -from __future__ import division -import unittest -from rootpy.plotting import Hist2D -from tools.Calculation import calculate_purities, calculate_stabilities -import importlib -from tools.hist_utilities import rebin_2d - -pick_bins = importlib.import_module( "src.cross_section_measurement.00_pick_bins" ) - - -import numpy as np - -class Test( unittest.TestCase ): - - - def setUp( self ): - - - # create histograms - self.h1 = Hist2D( 60, 40, 100, 60, 40, 100 ) - self.h2 = Hist2D( 60, 40, 100, 60, 40, 100 ) - self.h3 = Hist2D( 60, 40, 100, 60, 40, 100 ) - - n_1 = 10000 - n_2 = int( n_1 / 5 ) - x_1 = 60 + 10 * np.random.randn( n_1 ) - x_2 = 60 + 10 * np.random.randn( n_2 ) - x_3 = 60 + 5 * np.random.randn( n_1 ) - y_1 = x_1 + np.random.randn( n_1 ) - y_2 = x_2 + np.random.randn( n_2 ) - y_3 = x_3 + np.random.randn( n_1 ) - - z_1 = np.vstack( ( x_1, y_1 ) ).T - z_2 = 
np.vstack( ( x_2, y_2 ) ).T - z_3 = np.vstack( ( x_3, y_3 ) ).T - # fill the histograms with our distributions - self.h1.fill_array( z_1 ) - # reduced number of events - self.h2.fill_array( z_2 ) - # reduced spread - self.h3.fill_array( z_3 ) - - self.histogram_information_1 = [ - {'hist': self.h1, - 'CoM': 7, - 'channel':'test_1'}, - ] - self.histogram_information_2 = [ - {'hist': self.h2, - 'CoM': 7, - 'channel':'test_2'}, - ] - self.histogram_information_3 = [ - {'hist': self.h3, - 'CoM': 7, - 'channel':'test_3'}, - ] - self.histogram_information_1_2 = [ - {'hist': self.h1, - 'CoM': 7, - 'channel':'test_1'}, - {'hist': self.h2, - 'CoM': 7, - 'channel':'test_2'}, - ] - self.histogram_information_1_3 = [ - {'hist': self.h1, - 'CoM': 7, - 'channel':'test_1'}, - {'hist': self.h3, - 'CoM': 7, - 'channel':'test_3'}, - ] - # requirements for new binning - self.p_min, self.s_min, self.n_min = 0.5, 0.5, 100 - self.bin_edges_1, _ = pick_bins.get_best_binning( - self.histogram_information_1, - self.p_min, - self.s_min, - self.n_min - ) - self.bin_edges_2, _ = pick_bins.get_best_binning( - self.histogram_information_2, - self.p_min, - self.s_min, - self.n_min - ) - self.bin_edges_3, _ = pick_bins.get_best_binning( - self.histogram_information_3, - self.p_min, - self.s_min, - self.n_min - ) - self.bin_edges_1_2, _ = pick_bins.get_best_binning( - self.histogram_information_1_2, - self.p_min, - self.s_min, - self.n_min - ) - self.bin_edges_1_3, _ = pick_bins.get_best_binning( - self.histogram_information_1_3, - self.p_min, - self.s_min, - self.n_min - ) - - self.h1_rebinned = rebin_2d(self.h1, self.bin_edges_1, self.bin_edges_1) - self.h2_rebinned = rebin_2d(self.h2, self.bin_edges_2, self.bin_edges_2) - self.h3_rebinned = rebin_2d(self.h3, self.bin_edges_3, self.bin_edges_3) - self.h1_2_rebinned = rebin_2d(self.h1, self.bin_edges_1_2, self.bin_edges_1_2) - self.h1_3_rebinned = rebin_2d(self.h1, self.bin_edges_1_3, self.bin_edges_1_3) - - - def tearDown( self ): - pass - - def 
test_purities( self ): - purities_1 = calculate_purities( self.h1_rebinned ) - for purity in purities_1: - self.assertGreaterEqual( purity, self.p_min ) - - def test_purities_reduced_N(self): - purities = calculate_purities( self.h2_rebinned ) - for purity in purities: - self.assertGreaterEqual( purity, self.p_min ) - - def test_purities_reduced_spread(self): - purities = calculate_purities( self.h3_rebinned ) - for purity in purities: - self.assertGreaterEqual( purity, self.p_min ) - - def test_purities_combined_1_2(self): - purities = calculate_purities( self.h1_2_rebinned ) - for purity in purities: - self.assertGreaterEqual( purity, self.p_min ) - - def test_purities_combined_1_3(self): - purities = calculate_purities( self.h1_3_rebinned ) - for purity in purities: - self.assertGreaterEqual( purity, self.p_min ) - - def test_stabilities( self ): - stabilities_1 = calculate_stabilities( self.h1_rebinned ) - for stability in stabilities_1: - self.assertGreaterEqual( stability, self.s_min ) - - def test_stabilities_reduced_N(self): - stabilities = calculate_stabilities( self.h2_rebinned ) - for stability in stabilities: - self.assertGreaterEqual( stability, self.s_min ) - - def test_stabilities_reduced_spread(self): - stabilities = calculate_stabilities( self.h3_rebinned ) - for stability in stabilities: - self.assertGreaterEqual( stability, self.s_min ) - - def test_stabilities_combined_1_2(self): - stabilities = calculate_stabilities( self.h1_2_rebinned ) - for stability in stabilities: - self.assertGreaterEqual( stability, self.s_min ) - - def test_stabilities_combined_1_3(self): - stabilities = calculate_stabilities( self.h1_3_rebinned ) - for stability in stabilities: - self.assertGreaterEqual( stability, self.s_min ) - - def test_n_events( self ): - n_events = [self.h1_rebinned.GetBinContent( i, i ) for i in range( 1, len( self.bin_edges_1 ) )] - for N in n_events: - self.assertGreaterEqual( N, self.n_min ) - - def test_n_events_reduced_N(self): - n_events = 
[self.h2_rebinned.GetBinContent( i, i ) for i in range( 1, len( self.bin_edges_2 ) )] - for N in n_events: - self.assertGreaterEqual( N, self.n_min ) - - def test_n_events_reduced_spread(self): - n_events = [self.h3_rebinned.GetBinContent( i, i ) for i in range( 1, len( self.bin_edges_3 ) )] - for N in n_events: - self.assertGreaterEqual( N, self.n_min ) - - def test_n_events_combined_1_2(self): - n_events = [self.h1_2_rebinned.GetBinContent( i, i ) for i in range( 1, len( self.bin_edges_1_2 ) )] - for N in n_events: - self.assertGreaterEqual( N, self.n_min ) - - def test_n_events_combined_1_3(self): - n_events = [self.h1_3_rebinned.GetBinContent( i, i ) for i in range( 1, len( self.bin_edges_1_3 ))] - for N in n_events: - self.assertGreaterEqual( N, self.n_min ) - -if __name__ == "__main__": - # import sys;sys.argv = ['', 'Test.testTemplates'] - unittest.main() diff --git a/src/file.py b/src/file.py deleted file mode 100644 index 855ed82b..00000000 --- a/src/file.py +++ /dev/null @@ -1,14 +0,0 @@ -''' -Created on 26 Nov 2012 - -@author: kreczko -''' - -def check_for_duplicate_files(path): - pass - -def check_files(files, checks = []): - pass - -def copy_and_merge_ROOT_files(root_files, merged_size, log_file = 'merge.log'): - pass \ No newline at end of file diff --git a/tests/analysis/__init__.py b/tests/analysis/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/unfolding_tests/tests/test_create_unfolding_pull_data.py b/tests/analysis/test_create_unfolding_pull_data.py similarity index 68% rename from src/unfolding_tests/tests/test_create_unfolding_pull_data.py rename to tests/analysis/test_create_unfolding_pull_data.py index d12ad468..4e184c45 100644 --- a/src/unfolding_tests/tests/test_create_unfolding_pull_data.py +++ b/tests/analysis/test_create_unfolding_pull_data.py @@ -4,13 +4,14 @@ @author: phxlk ''' import unittest -import src.unfolding_tests.create_unfolding_pull_data as pull +from 
dps.analysis.unfolding_tests.create_unfolding_pull_data import create_run_matrix class Test(unittest.TestCase): + @unittest.skip('reported: https://github.com/BristolTopGroup/DailyPythonScripts/issues/332') def testRun_matrixSimple(self): - m = pull.create_run_matrix(10, 10, 0, 0) + m = create_run_matrix(10, 10) self.assertEqual(len(list(m)), 10 * 10) for mc, data in m: self.assertGreaterEqual(mc, 1) @@ -18,8 +19,9 @@ def testRun_matrixSimple(self): self.assertLessEqual(mc, 10) self.assertLessEqual(data, 10) + @unittest.skip('reported: https://github.com/BristolTopGroup/DailyPythonScripts/issues/332') def testRun_matrixWithOffset(self): - m = list(pull.create_run_matrix(10, 10, 5, 5)) + m = list(create_run_matrix(10, 10)) self.assertEqual(len(m), 10 * 10) for mc, data in m: self.assertGreaterEqual(mc, 5) diff --git a/tests/analysis/test_pick_bins.py b/tests/analysis/test_pick_bins.py new file mode 100644 index 00000000..cc1e11a9 --- /dev/null +++ b/tests/analysis/test_pick_bins.py @@ -0,0 +1,214 @@ +''' +Created on 31 Oct 2012 + +@author: kreczko +''' +from __future__ import division +import unittest +import importlib + +import numpy as np +from rootpy.plotting import Hist2D + +from dps.utils.Calculation import calculate_purities, calculate_stabilities +from dps.utils.hist_utilities import rebin_2d + +pick_bins = importlib.import_module("dps.analysis.xsection.00_pick_bins") + + +class Test(unittest.TestCase): + + def setUp(self): + + # create histograms + self.h1 = Hist2D(60, 40, 100, 60, 40, 100) + self.h2 = Hist2D(60, 40, 100, 60, 40, 100) + self.h3 = Hist2D(60, 40, 100, 60, 40, 100) + + n_1 = 10000 + n_2 = int(n_1 / 5) + x_1 = 60 + 10 * np.random.randn(n_1) + x_2 = 60 + 10 * np.random.randn(n_2) + x_3 = 60 + 5 * np.random.randn(n_1) + y_1 = x_1 + np.random.randn(n_1) + y_2 = x_2 + np.random.randn(n_2) + y_3 = x_3 + np.random.randn(n_1) + + z_1 = np.vstack((x_1, y_1)).T + z_2 = np.vstack((x_2, y_2)).T + z_3 = np.vstack((x_3, y_3)).T + # fill the histograms with 
our distributions + self.h1.fill_array(z_1) + # reduced number of events + self.h2.fill_array(z_2) + # reduced spread + self.h3.fill_array(z_3) + + self.histogram_information_1 = [ + {'hist': self.h1, + 'CoM': 7, + 'channel': 'test_1'}, + ] + self.histogram_information_2 = [ + {'hist': self.h2, + 'CoM': 7, + 'channel': 'test_2'}, + ] + self.histogram_information_3 = [ + {'hist': self.h3, + 'CoM': 7, + 'channel': 'test_3'}, + ] + self.histogram_information_1_2 = [ + {'hist': self.h1, + 'CoM': 7, + 'channel': 'test_1'}, + {'hist': self.h2, + 'CoM': 7, + 'channel': 'test_2'}, + ] + self.histogram_information_1_3 = [ + {'hist': self.h1, + 'CoM': 7, + 'channel': 'test_1'}, + {'hist': self.h3, + 'CoM': 7, + 'channel': 'test_3'}, + ] + # requirements for new binning + self.p_min, self.s_min, self.n_min = 0.5, 0.5, 100 + min_width = 0.000000000000001 + self.bin_edges_1, _ = pick_bins.get_best_binning( + self.histogram_information_1, + self.p_min, + self.s_min, + self.n_min, + min_width, + ) + self.bin_edges_2, _ = pick_bins.get_best_binning( + self.histogram_information_2, + self.p_min, + self.s_min, + self.n_min, + min_width, + ) + self.bin_edges_3, _ = pick_bins.get_best_binning( + self.histogram_information_3, + self.p_min, + self.s_min, + self.n_min, + min_width, + ) + self.bin_edges_1_2, _ = pick_bins.get_best_binning( + self.histogram_information_1_2, + self.p_min, + self.s_min, + self.n_min, + min_width, + ) + self.bin_edges_1_3, _ = pick_bins.get_best_binning( + self.histogram_information_1_3, + self.p_min, + self.s_min, + self.n_min, + min_width, + ) + + self.h1_rebinned = rebin_2d( + self.h1, self.bin_edges_1, self.bin_edges_1) + self.h2_rebinned = rebin_2d( + self.h2, self.bin_edges_2, self.bin_edges_2) + self.h3_rebinned = rebin_2d( + self.h3, self.bin_edges_3, self.bin_edges_3) + self.h1_2_rebinned = rebin_2d( + self.h1, self.bin_edges_1_2, self.bin_edges_1_2) + self.h1_3_rebinned = rebin_2d( + self.h1, self.bin_edges_1_3, self.bin_edges_1_3) + + def 
tearDown(self): + pass + + def test_purities(self): + purities_1 = calculate_purities(self.h1_rebinned) + for purity in purities_1: + self.assertGreaterEqual(purity, self.p_min) + + def test_purities_reduced_N(self): + purities = calculate_purities(self.h2_rebinned) + for purity in purities: + self.assertGreaterEqual(purity, self.p_min) + + def test_purities_reduced_spread(self): + purities = calculate_purities(self.h3_rebinned) + for purity in purities: + self.assertGreaterEqual(purity, self.p_min) + + def test_purities_combined_1_2(self): + purities = calculate_purities(self.h1_2_rebinned) + for purity in purities: + self.assertGreaterEqual(purity, self.p_min) + + def test_purities_combined_1_3(self): + purities = calculate_purities(self.h1_3_rebinned) + for purity in purities: + self.assertGreaterEqual(purity, self.p_min) + + def test_stabilities(self): + stabilities_1 = calculate_stabilities(self.h1_rebinned) + for stability in stabilities_1: + self.assertGreaterEqual(stability, self.s_min) + + def test_stabilities_reduced_N(self): + stabilities = calculate_stabilities(self.h2_rebinned) + for stability in stabilities: + self.assertGreaterEqual(stability, self.s_min) + + def test_stabilities_reduced_spread(self): + stabilities = calculate_stabilities(self.h3_rebinned) + for stability in stabilities: + self.assertGreaterEqual(stability, self.s_min) + + def test_stabilities_combined_1_2(self): + stabilities = calculate_stabilities(self.h1_2_rebinned) + for stability in stabilities: + self.assertGreaterEqual(stability, self.s_min) + + def test_stabilities_combined_1_3(self): + stabilities = calculate_stabilities(self.h1_3_rebinned) + for stability in stabilities: + self.assertGreaterEqual(stability, self.s_min) + + @unittest.skip('reported: https://github.com/BristolTopGroup/DailyPythonScripts/issues/333') + def test_n_events(self): + n_events = [self.h1_rebinned.GetBinContent( + i, i) for i in range(1, len(self.bin_edges_1))] + for N in n_events: + 
self.assertGreaterEqual(N, self.n_min) + + def test_n_events_reduced_N(self): + n_events = [self.h2_rebinned.GetBinContent( + i, i) for i in range(1, len(self.bin_edges_2))] + for N in n_events: + self.assertGreaterEqual(N, self.n_min) + + def test_n_events_reduced_spread(self): + n_events = [self.h3_rebinned.GetBinContent( + i, i) for i in range(1, len(self.bin_edges_3))] + for N in n_events: + self.assertGreaterEqual(N, self.n_min) + + def test_n_events_combined_1_2(self): + n_events = [self.h1_2_rebinned.GetBinContent( + i, i) for i in range(1, len(self.bin_edges_1_2))] + for N in n_events: + self.assertGreaterEqual(N, self.n_min) + + def test_n_events_combined_1_3(self): + n_events = [self.h1_3_rebinned.GetBinContent( + i, i) for i in range(1, len(self.bin_edges_1_3))] + for N in n_events: + self.assertGreaterEqual(N, self.n_min) + +if __name__ == "__main__": + # import sys;sys.argv = ['', 'Test.testTemplates'] + unittest.main() diff --git a/tests/config/__init__.py b/tests/config/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/config/test_xsection_config.py b/tests/config/test_xsection_config.py new file mode 100644 index 00000000..bee84a4d --- /dev/null +++ b/tests/config/test_xsection_config.py @@ -0,0 +1,57 @@ +''' +Created on 14 May 2014 + +@author: kreczko +''' +import unittest +from dps.config.xsection import XSectionConfig +from __builtin__ import getattr + + +class Test(unittest.TestCase): + + def setUp(self): +# self.config_7TeV = XSectionConfig(centre_of_mass_energy=7) +# self.config_8TeV = XSectionConfig(centre_of_mass_energy=8) + self.config_13TeV = XSectionConfig(centre_of_mass_energy=13) + + def test_current_analysis_path(self): + self.assertTrue(XSectionConfig.current_analysis_path.endswith('/')) + + def test_paths(self): + self.assertTrue(XSectionConfig.current_analysis_path.endswith('/')) +# self.assertTrue(self.config_7TeV.path_to_files.endswith('/')) +# self.assertTrue( +# 
self.config_7TeV.path_to_unfolding_histograms.endswith('/')) +# self.assertTrue(self.config_8TeV.path_to_files.endswith('/')) + +# self.assertTrue('7TeV' in self.config_7TeV.path_to_files) +# self.assertTrue('8TeV' in self.config_8TeV.path_to_files) + self.assertTrue('13TeV' in self.config_13TeV.path_to_files) + + unfolding_files = ['unfolding_powheg_pythia_raw', 'unfolding_powheg_herwig_raw', + 'unfolding_mcatnlo_raw', 'unfolding_scale_down_raw', + 'unfolding_scale_up_raw', 'unfolding_matching_down_raw', + 'unfolding_matching_up_raw', ] +# for u_file in unfolding_files: +# full_path = getattr(self.config_7TeV, u_file) +# self.assertEqual(full_path.count('7TeV'), 2) +# full_path = getattr(self.config_8TeV, u_file) +# self.assertEqual(full_path.count('8TeV'), 2) + + def test_invalid_centre_of_mass_energy(self): + self.assertRaises(AttributeError, XSectionConfig, (1232)) + +# def test_luminosity(self): +# self.assertEqual(self.config_7TeV.luminosity, 5050) +# self.assertEqual(self.config_8TeV.luminosity, 19584) +# self.assertEqual(self.config_8TeV.luminosity, 19584) + + def test_parameters(self): + for param in XSectionConfig.parameters: + self.assertTrue( + hasattr(self.config_13TeV, param), 'Parameter ' + param + ' not found.') + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/data/data.py b/tests/data/data.py index f97f71b6..d19df4d7 100644 --- a/tests/data/data.py +++ b/tests/data/data.py @@ -1,24 +1,27 @@ from rootpy.tree import Tree +from rootpy.io import File from random import gauss from rootpy.plotting.hist import Hist, Hist2D import numpy as np -def create_test_tree(): - tree = Tree("test") - tree.create_branches( - {'x': 'F', - 'y': 'F', - 'z': 'F', - 'i': 'I'}) - for i in xrange(10000): - tree.x = gauss(.5, 1.) - tree.y = gauss(.3, 2.) - tree.z = gauss(13., 42.) 
- tree.i = i - tree.fill() - return tree - +def create_test_tree(filename='test.root'): + with File.open (filename, 'recreate') as f: + tree = Tree("test") + tree.create_branches( + {'x': 'F', + 'y': 'F', + 'z': 'F', + 'i': 'I', + 'EventWeight': "F"}) + for i in xrange(10000): + tree.x = gauss(.5, 1.) + tree.y = gauss(.3, 2.) + tree.z = gauss(13., 42.) + tree.i = i + tree.EventWeight = 1. + tree.fill() + f.write() def create_test_hist(): h = Hist(100, -10, 10) diff --git a/tests/test_rootpy.py b/tests/test_rootpy.py index dd6c8881..397ef51e 100644 --- a/tests/test_rootpy.py +++ b/tests/test_rootpy.py @@ -4,22 +4,38 @@ @author: phxlk ''' import unittest -from tests.data import create_test_hist, create_test_tree +from rootpy.tree import Tree +from .data import create_test_hist from rootpy.io.file import File +from random import gauss class Test(unittest.TestCase): def setUp(self): - f = File('test.root', 'recreate') - f.mkdir('TTbar_plus_X_analysis/EPlusJets/Ref selection', recurse=True) - f.cd('TTbar_plus_X_analysis/EPlusJets/Ref selection') - tree = create_test_tree() - h = create_test_hist() - h.write() - tree.write() - f.write() - f.Close() + with File.open ('test.root', 'recreate') as f: + f.mkdir('TTbar_plus_X_analysis/EPlusJets/Ref selection', recurse=True) + f.cd('TTbar_plus_X_analysis/EPlusJets/Ref selection') + tree = Tree("test") + tree.create_branches( + {'x': 'F', + 'y': 'F', + 'z': 'F', + 'i': 'I', + 'EventWeight': "F"}) + for i in xrange(10000): + tree.x = gauss(.5, 1.) + tree.y = gauss(.3, 2.) + tree.z = gauss(13., 42.) + tree.i = i + tree.EventWeight = 1. 
+ tree.fill() + f.write() + h = create_test_hist() + h.write() + tree.write() + f.write() + f.Close() def tearDown(self): diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tools/tests/test_Calculation.py b/tests/utils/test_Calculation.py similarity index 97% rename from tools/tests/test_Calculation.py rename to tests/utils/test_Calculation.py index 8aca3c76..493bdd71 100644 --- a/tools/tests/test_Calculation.py +++ b/tests/utils/test_Calculation.py @@ -10,9 +10,9 @@ from rootpy.plotting import Hist2D # under test -from tools.Calculation import calculate_purities -from tools.Calculation import calculate_stabilities -from tools.Calculation import decombine_result +from dps.utils.Calculation import calculate_purities +from dps.utils.Calculation import calculate_stabilities +from dps.utils.Calculation import decombine_result class Test( unittest.TestCase ): diff --git a/tools/tests/test_Fitting_FitData.py b/tests/utils/test_Fitting_FitData.py similarity index 98% rename from tools/tests/test_Fitting_FitData.py rename to tests/utils/test_Fitting_FitData.py index d5bda19c..b2e7c43f 100644 --- a/tools/tests/test_Fitting_FitData.py +++ b/tests/utils/test_Fitting_FitData.py @@ -4,11 +4,11 @@ @author: kreczko ''' import unittest -from tools.Fitting import FitData, FitDataCollection +from dps.utils.Fitting import FitData, FitDataCollection from rootpy.plotting import Hist import numpy as np -from tools.hist_utilities import adjust_overflow_to_limit +from dps.utils.hist_utilities import adjust_overflow_to_limit N_bkg1 = 9000 N_signal = 1000 N_bkg1_obs = 10000 diff --git a/tools/tests/test_Fitting_Minuit.py b/tests/utils/test_Fitting_Minuit.py similarity index 97% rename from tools/tests/test_Fitting_Minuit.py rename to tests/utils/test_Fitting_Minuit.py index 99a936a2..f9196f7d 100644 --- a/tools/tests/test_Fitting_Minuit.py +++ b/tests/utils/test_Fitting_Minuit.py @@ -4,9 +4,12 @@ @author: kreczko ''' 
import unittest -from tools.Fitting import Minuit, FitData, FitDataCollection +from dps.utils.Fitting import Minuit, FitData, FitDataCollection from rootpy.plotting import Hist from math import sqrt +import ROOT +ROOT.gROOT.SetBatch(True) +ROOT.gROOT.ProcessLine( 'gErrorIgnoreLevel = 4000;' ) import numpy as np N_bkg1 = 9000 diff --git a/tools/tests/test_Fitting_RooFitFit.py b/tests/utils/test_Fitting_RooFitFit.py similarity index 97% rename from tools/tests/test_Fitting_RooFitFit.py rename to tests/utils/test_Fitting_RooFitFit.py index 676c112f..9b98a6fb 100644 --- a/tools/tests/test_Fitting_RooFitFit.py +++ b/tests/utils/test_Fitting_RooFitFit.py @@ -4,7 +4,7 @@ @author: kreczko ''' import unittest -from tools.Fitting import RooFitFit, FitData, FitDataCollection +from dps.utils.Fitting import RooFitFit, FitData, FitDataCollection from rootpy.plotting import Hist from math import sqrt diff --git a/tools/tests/test_HistogramProperties.py b/tests/utils/test_HistogramProperties.py similarity index 93% rename from tools/tests/test_HistogramProperties.py rename to tests/utils/test_HistogramProperties.py index 5c8c7281..11f78931 100644 --- a/tools/tests/test_HistogramProperties.py +++ b/tests/utils/test_HistogramProperties.py @@ -3,7 +3,7 @@ @author: kreczko ''' -from tools.plotting import Histogram_properties +from dps.utils.plotting import Histogram_properties def test_init_from_dictionary(): test_values = {} diff --git a/tests/utils/test_Integral_GetBinContent_consistency.py b/tests/utils/test_Integral_GetBinContent_consistency.py new file mode 100644 index 00000000..1b45e5e4 --- /dev/null +++ b/tests/utils/test_Integral_GetBinContent_consistency.py @@ -0,0 +1,108 @@ +from __future__ import division +import unittest +from rootpy.plotting import Hist2D +from dps.utils.Calculation import calculate_purities, calculate_stabilities +import importlib +from dps.utils.hist_utilities import rebin_2d, fix_overflow + +pick_bins = 
importlib.import_module("dps.analysis.xsection.00_pick_bins") + +import numpy as np + + +class Test(unittest.TestCase): + + def setUp(self): + # create histograms + # self.h1 = Hist2D( 15, 0, 15, 15, 0, 15 ) + # x_1 = [1, 3, 7, 7, 8, 1, 12, 7, 8, 6] + # y_1 = [1, 7, 3, 7, 8, 12, 7, 12, 13, 11] + self.h1 = Hist2D(60, 40, 100, 60, 40, 100) + n_1 = 100000 + x_1 = 60 + 10 * np.random.randn(n_1) + y_1 = x_1 + np.random.randn(n_1) + z_1 = np.vstack((x_1, y_1)).T + self.h1.fill_array(z_1) + + self.h1 = fix_overflow(self.h1) + + self.histogram_information = [ + {'hist': self.h1, + 'CoM': 7, + 'channel': 'test_1'}, + ] + + self.histograms = [info['hist'] for info in self.histogram_information] + + # requirements for new binning + self.p_min, self.s_min, self.n_min = 0.5, 0.5, 1000 + + self.bin_edges = [] + self.purities_GetBinContent = [] + self.stabilities_GetBinContent = [] + self.n_events_GetBinContent = [] + + self.purities_Integral = [] + self.stabilities_Integral = [] + self.n_events_Integral = [] + + first_hist = self.histograms[0] + n_bins = first_hist.GetNbinsX() + + current_bin_start = 0 + current_bin_end = 0 + + while current_bin_end < n_bins: + current_bin_end, p, s, n_gen_and_reco = pick_bins.get_next_end( + self.histograms, current_bin_start, current_bin_end, self.p_min, self.s_min, self.n_min, 0) + if not self.bin_edges: + # if empty + self.bin_edges.append( + first_hist.GetXaxis().GetBinLowEdge(current_bin_start + 1)) + self.bin_edges.append(first_hist.GetXaxis().GetBinLowEdge( + current_bin_end) + first_hist.GetXaxis().GetBinWidth(current_bin_end)) + self.purities_Integral.append(p) + self.stabilities_Integral.append(s) + self.n_events_Integral.append(n_gen_and_reco) + current_bin_start = current_bin_end + + self.h1_rebinned = rebin_2d(self.h1, self.bin_edges, self.bin_edges) + + self.purities_GetBinContent = calculate_purities(self.h1_rebinned) + self.stabilities_GetBinContent = calculate_stabilities( + self.h1_rebinned) + self.n_events_GetBinContent = [ + 
int(self.h1_rebinned.GetBinContent(i, i)) for i in range(1, len(self.bin_edges))] + + def tearDown(self): + pass + + def test_number_of_bins_equivalence(self): + self.assertEqual( + len(self.n_events_GetBinContent), len(self.n_events_Integral)) + self.assertEqual( + len(self.purities_GetBinContent), len(self.purities_Integral)) + self.assertEqual( + len(self.stabilities_GetBinContent), len(self.stabilities_Integral)) + pass + + @unittest.skip('reported: https://github.com/BristolTopGroup/DailyPythonScripts/issues/333') + def test_number_of_events(self): + for i, n in enumerate(self.n_events_GetBinContent): + self.assertEqual(n, self.n_events_Integral[i]) + pass + + def test_purities_equivalence(self): + for i, p in enumerate(self.purities_GetBinContent): + self.assertEqual(p, self.purities_Integral[i], msg='Calculated with Integral method purity ' + str( + self.purities_Integral[i]) + ' is not equal to GetBinContent one ' + str(p) + ' in bin ' + str(i + 1)) + pass + + def test_stabilities_equivalence(self): + for i, s in enumerate(self.stabilities_GetBinContent): + self.assertEqual(s, self.stabilities_Integral[i], msg='Calculated with Integral method stability ' + str( + self.stabilities_Integral[i]) + ' is not equal to GetBinContent one ' + str(s) + ' in bin ' + str(i + 1)) + pass + +if __name__ == "__main__": + unittest.main() diff --git a/tools/tests/test_ROOT_utils.py b/tests/utils/test_ROOT_utils.py similarity index 76% rename from tools/tests/test_ROOT_utils.py rename to tests/utils/test_ROOT_utils.py index 38ebdd01..29dbe4b8 100644 --- a/tools/tests/test_ROOT_utils.py +++ b/tests/utils/test_ROOT_utils.py @@ -1,19 +1,14 @@ import unittest -import tools.ROOT_utils as ru -from rootpy.io import File -from tests.data import create_test_tree +import dps.utils.ROOT_utils as ru +from ..data import create_test_tree class Test(unittest.TestCase): def setUp(self): - f = File('test.root', 'recreate') - tree = create_test_tree() - tree.write() - f.write() - f.Close() + 
create_test_tree('test.root') def tearDown(self): pass @@ -22,7 +17,7 @@ def test_get_histogram_from_tree(self): hist = ru.get_histogram_from_tree( tree = 'test', branch = 'x', - weight_branch = 'z', + weight_branches = ['z'], selection_branches = ['i'], input_file = 'test.root', n_bins = 10, diff --git a/tests/utils/test_Unfolding.py b/tests/utils/test_Unfolding.py new file mode 100644 index 00000000..5413a468 --- /dev/null +++ b/tests/utils/test_Unfolding.py @@ -0,0 +1,98 @@ +''' +Created on 15 May 2014 + +@author: senkin +''' +from __future__ import division +import unittest +from rootpy.io import File +from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple +from dps.utils.hist_utilities import hist_to_value_error_tuplelist, value_error_tuplelist_to_hist +from dps.utils.ROOT_utils import set_root_defaults +from dps.config.variable_binning import bin_edges_vis as bin_edges + + +class Test(unittest.TestCase): + + def setUp(self): + # load histograms + # @BROKEN: the file is now in the wrong format!! 
+ self.input_file = File('tests/data/unfolding_merged_asymmetric.root') + self.k_value = 3 + self.unfold_method = 'TUnfold' + self.met_type = 'patType1CorrectedPFMet' + self.variables = ['MET', 'WPT', 'MT', 'ST', 'HT'] + self.channels = ['electron', 'muon', 'combined'] + self.dict = {} + for channel in self.channels: + self.dict[channel] = {} + for variable in self.variables: + self.dict[variable] = {} + h_truth, h_measured, h_response, _ = get_unfold_histogram_tuple( + inputfile=self.input_file, + variable=variable, + channel=channel, + met_type=self.met_type) + + unfolding_object = Unfolding(h_truth, + h_measured, + h_response, + k_value=self.k_value, + method=self.unfold_method + ) + + tau_unfolding_object = Unfolding(h_truth, + h_measured, + h_response, + tau=100, + k_value=-1, + method='TUnfold') + + self.dict[channel][variable] = {'h_truth': h_truth, + 'h_measured': h_measured, + 'h_response': h_response, + 'unfolding_object': unfolding_object, + 'tau_unfolding_object': tau_unfolding_object, + } + + def tearDown(self): + pass + + @unittest.skip('skipping until unfolding_merged_asymmetric.root is sorted out') + def test_invalid_zero_data(self): + variable = 'MET' + channel = 'electron' + pseudo_data = value_error_tuplelist_to_hist( + [(0, 0)] * (len(bin_edges[variable]) - 1), bin_edges[variable]) + self.assertRaises(ValueError, self.dict[channel][variable][ + 'unfolding_object'].unfold, (pseudo_data)) + + @unittest.skip('skipping until unfolding_merged_asymmetric.root is sorted out') + def test_tau_closure(self): + for channel in self.channels: + for variable in self.variables: + data = self.dict[channel][variable]['h_measured'] + truth = hist_to_value_error_tuplelist( + self.dict[channel][variable]['h_truth']) + unfolded_result = hist_to_value_error_tuplelist( + self.dict[channel][variable]['tau_unfolding_object'].unfold(data)) + # the difference between the truth and unfolded result should + # be within the unfolding error + for (value, error), (true_value, 
_) in zip(unfolded_result, truth): + self.assertAlmostEquals(value, true_value, delta=error) +# print value, '+-', error, ' true:', true_value + + @unittest.skip('skipping until unfolding_merged_asymmetric.root is sorted out') + def test_k_to_tau(self): + data = self.dict['electron']['MET']['h_measured'] + tau_unfolding_object = self.dict['electron'][ + 'MET']['tau_unfolding_object'] + # first we need to unfold to get the matrix + tau_unfolding_object.unfold(data) + # next we need to get the actual RooUnfold object + tau = tau_unfolding_object.Impl().kToTau(self.k_value) + self.assertAlmostEqual(tau, 19., delta=1) + +if __name__ == "__main__": + set_root_defaults() + unittest.main() diff --git a/tools/tests/test_calculate_normalised_xsection.py b/tests/utils/test_calculate_normalised_xsection.py similarity index 96% rename from tools/tests/test_calculate_normalised_xsection.py rename to tests/utils/test_calculate_normalised_xsection.py index 750b9941..a895264a 100644 --- a/tools/tests/test_calculate_normalised_xsection.py +++ b/tests/utils/test_calculate_normalised_xsection.py @@ -5,7 +5,7 @@ ''' from __future__ import division import unittest -from tools.Calculation import calculate_normalised_xsection +from dps.utils.Calculation import calculate_normalised_xsection from uncertainties import ufloat class Test( unittest.TestCase ): diff --git a/tools/tests/test_fix_overflow.py b/tests/utils/test_fix_overflow.py similarity index 99% rename from tools/tests/test_fix_overflow.py rename to tests/utils/test_fix_overflow.py index e917c9c9..f7ce1ec6 100644 --- a/tools/tests/test_fix_overflow.py +++ b/tests/utils/test_fix_overflow.py @@ -5,7 +5,7 @@ ''' import unittest from rootpy.plotting import Hist, Hist2D -from tools.hist_utilities import fix_overflow +from dps.utils.hist_utilities import fix_overflow import numpy as np N_bkg1 = 9000 diff --git a/tools/tests/test_hist_utilities.py b/tests/utils/test_hist_utilities.py similarity index 98% rename from 
tools/tests/test_hist_utilities.py rename to tests/utils/test_hist_utilities.py index 69f8c2ec..8a2e3199 100644 --- a/tools/tests/test_hist_utilities.py +++ b/tests/utils/test_hist_utilities.py @@ -6,7 +6,7 @@ from __future__ import division import unittest from rootpy.plotting import Hist, Hist2D -from tools.hist_utilities import rebin_2d, adjust_overflow_to_limit, hist_to_value_error_tuplelist +from dps.utils.hist_utilities import rebin_2d, adjust_overflow_to_limit import numpy as np N_bkg1 = 9000 diff --git a/tests/utils/test_input.py b/tests/utils/test_input.py new file mode 100644 index 00000000..90f15c58 --- /dev/null +++ b/tests/utils/test_input.py @@ -0,0 +1,85 @@ +''' +Created on 21 Jul 2015 + +@author: phxlk +''' +import unittest +from rootpy.io import File +from ..data import create_test_tree, create_test_hist +import dps.utils.input as ti + + +class Test(unittest.TestCase): + + def setUp(self): + create_test_tree('test.root') + # append a histogram + with File.open ('test.root', 'a+') as f: + h = create_test_hist() + h.write() + f.write() + + def tearDown(self): + pass + + def testValidityTreeInput(self): + i = ti.Input(input_file='test.root', + tree='test', + branch='x', + selection='1', + weight_branch='1') + self.assertTrue(i.isValid()) + + def testValidityHistInput(self): + i = ti.Input(input_file='test.root', + hist='test_hist', + ) + self.assertTrue(i.isValid()) + + def testFailValidityTreeInput(self): + i = ti.Input(input_file='doesnotexist.root', + tree='test', + branch='x', + selection='1', + weight_branch='1') + self.assertFalse(i.isValid()) + + def testFailValidityHistInput(self): + i = ti.Input(input_file='test.root', + hist='doesnotexist', + ) + self.assertFalse(i.isValid()) + + def testReadHist(self): + i = ti.Input(input_file='test.root', + hist='test_hist', + ) + h = i.read() + h_test = create_test_hist() + self.assertEqual(h.nbins(), h_test.nbins()) + + def testReadTree(self): + i = ti.Input(input_file='test.root', + tree='test', + 
branch='x', + selection='1', + weight_branch='EventWeight', + n_bins=10, + x_min=0, + x_max=10, + ) + h = i.read() + self.assertEqual(h.nbins(), 10) + + def testToDict1(self): + i = ti.Input(input_file='test.root', + hist='test_hist', + ) + d = i.toDict() + expected = {'class': 'dps.utils.input.Input', + 'input_file': 'test.root', 'hist': 'test_hist'} + self.assertEqual(d, expected) + +if __name__ == "__main__": + #import sys;sys.argv = ['', 'Test.testName'] + unittest.main() diff --git a/tools/tests/test_plotting.py b/tests/utils/test_plotting.py similarity index 73% rename from tools/tests/test_plotting.py rename to tests/utils/test_plotting.py index bc596bb6..2c57767c 100644 --- a/tools/tests/test_plotting.py +++ b/tests/utils/test_plotting.py @@ -3,9 +3,9 @@ @author: kreczko ''' -from tools.plotting import get_best_max_y -from tools.hist_utilities import value_errors_tuplelist_to_graph -from tools.hist_utilities import value_error_tuplelist_to_hist +from dps.utils.plotting import get_best_max_y +from dps.utils.hist_utilities import value_errors_tuplelist_to_graph +from dps.utils.hist_utilities import value_error_tuplelist_to_hist data_h = [( 3, 1 ), ( 2, 1 ), diff --git a/tools/.gitignore b/tools/.gitignore deleted file mode 100644 index 0479089c..00000000 --- a/tools/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/__init__.pyc diff --git a/tools/tests/test_Integral_GetBinContent_consistency.py b/tools/tests/test_Integral_GetBinContent_consistency.py deleted file mode 100644 index 674d8a4e..00000000 --- a/tools/tests/test_Integral_GetBinContent_consistency.py +++ /dev/null @@ -1,97 +0,0 @@ -from __future__ import division -import unittest -from rootpy.plotting import Hist2D -from tools.Calculation import calculate_purities, calculate_stabilities -import importlib -from tools.hist_utilities import rebin_2d, fix_overflow - -pick_bins = importlib.import_module( "src.cross_section_measurement.00_pick_bins" ) - -import numpy as np - -class Test( unittest.TestCase ): - - - 
def setUp( self ): - # create histograms - # self.h1 = Hist2D( 15, 0, 15, 15, 0, 15 ) - # x_1 = [1, 3, 7, 7, 8, 1, 12, 7, 8, 6] - # y_1 = [1, 7, 3, 7, 8, 12, 7, 12, 13, 11] - self.h1 = Hist2D( 60, 40, 100, 60, 40, 100 ) - n_1 = 100000 - x_1 = 60 + 10 * np.random.randn( n_1 ) - y_1 = x_1 + np.random.randn( n_1 ) - z_1 = np.vstack( ( x_1, y_1 ) ).T - self.h1.fill_array( z_1 ) - - self.h1 = fix_overflow( self.h1 ) - - self.histogram_information = [ - {'hist': self.h1, - 'CoM': 7, - 'channel':'test_1'}, - ] - - self.histograms = [info['hist'] for info in self.histogram_information] - - # requirements for new binning - self.p_min, self.s_min, self.n_min = 0.5, 0.5, 1000 - - self.bin_edges = [] - self.purities_GetBinContent = [] - self.stabilities_GetBinContent = [] - self.n_events_GetBinContent = [] - - self.purities_Integral = [] - self.stabilities_Integral = [] - self.n_events_Integral = [] - - first_hist = self.histograms[0] - n_bins = first_hist.GetNbinsX() - - current_bin_start = 0 - current_bin_end = 0 - - while current_bin_end < n_bins: - current_bin_end, p, s, n_gen_and_reco = pick_bins.get_next_end( self.histograms, current_bin_start, current_bin_end, self.p_min, self.s_min, self.n_min ) - if not self.bin_edges: - # if empty - self.bin_edges.append( first_hist.GetXaxis().GetBinLowEdge( current_bin_start + 1 ) ) - self.bin_edges.append( first_hist.GetXaxis().GetBinLowEdge( current_bin_end ) + first_hist.GetXaxis().GetBinWidth( current_bin_end ) ) - self.purities_Integral.append(p) - self.stabilities_Integral.append(s) - self.n_events_Integral.append(n_gen_and_reco) - current_bin_start = current_bin_end - - self.h1_rebinned = rebin_2d(self.h1, self.bin_edges, self.bin_edges) - - self.purities_GetBinContent = calculate_purities( self.h1_rebinned ) - self.stabilities_GetBinContent = calculate_stabilities( self.h1_rebinned ) - self.n_events_GetBinContent = [int( self.h1_rebinned.GetBinContent( i, i ) ) for i in range( 1, len( self.bin_edges ) )] - - def tearDown( 
self ): - pass - - def test_number_of_bins_equivalence( self ): - self.assertEqual( len(self.n_events_GetBinContent), len(self.n_events_Integral) ) - self.assertEqual( len(self.purities_GetBinContent), len(self.purities_Integral) ) - self.assertEqual( len(self.stabilities_GetBinContent), len(self.stabilities_Integral) ) - pass - - def test_number_of_events( self ): - for i, n in enumerate(self.n_events_GetBinContent): - self.assertEqual( n, self.n_events_Integral[i] ) - pass - - def test_purities_equivalence( self ): - for i, p in enumerate(self.purities_GetBinContent): - self.assertEqual( p, self.purities_Integral[i], msg = 'Calculated with Integral method purity ' + str(self.purities_Integral[i]) + ' is not equal to GetBinContent one ' + str(p) + ' in bin ' + str(i+1) ) - pass - - def test_stabilities_equivalence( self ): - for i, s in enumerate(self.stabilities_GetBinContent): - self.assertEqual( s, self.stabilities_Integral[i], msg = 'Calculated with Integral method stability ' + str(self.stabilities_Integral[i]) + ' is not equal to GetBinContent one ' + str(s) + ' in bin ' + str(i+1) ) - pass - -if __name__ == "__main__": - unittest.main() diff --git a/tools/tests/test_Unfolding.py b/tools/tests/test_Unfolding.py deleted file mode 100644 index 37d71084..00000000 --- a/tools/tests/test_Unfolding.py +++ /dev/null @@ -1,97 +0,0 @@ -''' -Created on 15 May 2014 - -@author: senkin -''' -from __future__ import division -import unittest -from rootpy.io import File -from tools.Unfolding import Unfolding, get_unfold_histogram_tuple -from tools.hist_utilities import hist_to_value_error_tuplelist, value_error_tuplelist_to_hist -from tools.ROOT_utils import set_root_defaults -from config.variable_binning import bin_edges - -class Test( unittest.TestCase ): - - def setUp( self ): - # load histograms - self.input_file = File('tests/data/unfolding_merged_asymmetric.root') - self.k_value = 3 - self.unfold_method = 'TUnfold' - self.met_type = 'patType1CorrectedPFMet' - 
self.variables = ['MET', 'WPT', 'MT' , 'ST', 'HT'] - self.channels = ['electron', 'muon', 'combined'] - self.dict = {} - for channel in self.channels: - self.dict[channel] = {} - for variable in self.variables: - self.dict[variable] = {} - h_truth, h_measured, h_response, _ = get_unfold_histogram_tuple( - inputfile = self.input_file, - variable = variable, - channel = channel, - met_type = self.met_type) - - unfolding_object = Unfolding( h_truth, - h_measured, - h_response, - k_value = self.k_value, - method = self.unfold_method - ) - - tau_unfolding_object = Unfolding( h_truth, - h_measured, - h_response, - tau=100, - k_value= -1, - method='TUnfold') - - self.dict[channel][variable] = {'h_truth' : h_truth, - 'h_measured' : h_measured, - 'h_response' : h_response, - 'unfolding_object' : unfolding_object, - 'tau_unfolding_object': tau_unfolding_object, - } - - def tearDown( self ): - pass - -# def test_closure( self ): -# for channel in self.channels: -# for variable in self.variables: -# # closure test -# unfolded_result = hist_to_value_error_tuplelist( self.dict[channel][variable]['unfolding_object'].closureTest() ) -# truth = hist_to_value_error_tuplelist( self.dict[channel][variable]['h_truth'] ) -# # the difference between the truth and unfolded result should be within the unfolding error -# for (value, error), (true_value, _) in zip(unfolded_result, truth): -# self.assertAlmostEquals(value, true_value, delta = error) - - def test_invalid_zero_data( self ): - variable = 'MET' - channel = 'electron' - pseudo_data = value_error_tuplelist_to_hist( [(0,0)]*( len( bin_edges[variable] ) - 1 ), bin_edges[variable] ) - self.assertRaises(ValueError, self.dict[channel][variable]['unfolding_object'].unfold, (pseudo_data)) - - def test_tau_closure(self): - for channel in self.channels: - for variable in self.variables: - data = self.dict[channel][variable]['h_measured'] - truth = hist_to_value_error_tuplelist( self.dict[channel][variable]['h_truth'] ) - unfolded_result = 
hist_to_value_error_tuplelist(self.dict[channel][variable]['tau_unfolding_object'].unfold(data)) - # the difference between the truth and unfolded result should be within the unfolding error - for (value, error), (true_value, _) in zip(unfolded_result, truth): - self.assertAlmostEquals(value, true_value, delta = error) -# print value, '+-', error, ' true:', true_value - - def test_k_to_tau(self): - data = self.dict['electron']['MET']['h_measured'] - tau_unfolding_object = self.dict['electron']['MET']['tau_unfolding_object'] - # first we need to unfold to get the matrix - tau_unfolding_object.unfold(data) - # next we need to get the actual RooUnfold object - tau = tau_unfolding_object.Impl().kToTau(self.k_value) - self.assertAlmostEqual(tau, 19., delta = 1) - -if __name__ == "__main__": - set_root_defaults() - unittest.main() diff --git a/tools/tests/test_input.py b/tools/tests/test_input.py deleted file mode 100644 index 6f3b3c88..00000000 --- a/tools/tests/test_input.py +++ /dev/null @@ -1,84 +0,0 @@ -''' -Created on 21 Jul 2015 - -@author: phxlk -''' -import unittest -from rootpy.io import File -from tests.data import create_test_tree, create_test_hist -import tools.input as ti - - -class Test(unittest.TestCase): - - def setUp(self): - f = File('test.root', 'recreate') - tree = create_test_tree() - h = create_test_hist() - h.write() - tree.write() - f.write() - f.Close() - - def tearDown(self): - pass - - def testValidityTreeInput(self): - i = ti.Input(input_file='test.root', - tree='test', - branch='x', - selection='1', - weight_branch='1') - self.assertTrue(i.isValid()) - - def testValidityHistInput(self): - i = ti.Input(input_file='test.root', - hist='test_hist', - ) - self.assertTrue(i.isValid()) - - def testFailValidityTreeInput(self): - i = ti.Input(input_file='doesnotexist.root', - tree='test', - branch='x', - selection='1', - weight_branch='1') - self.assertFalse(i.isValid()) - - def testFailValidityHistInput(self): - i = ti.Input(input_file='test.root', 
- hist='doesnotexist', - ) - self.assertFalse(i.isValid()) - - def testReadHist(self): - i = ti.Input(input_file='test.root', - hist='test_hist', - ) - h = i.read() - h_test = create_test_hist() - self.assertEqual(h.nbins(), h_test.nbins()) - - def testReadTree(self): - i = ti.Input(input_file='test.root', - tree='test', - branch='x', - selection='1', - weight_branch='1', - n_bins=10, - x_min=0, - x_max=10, - ) - h = i.read() - self.assertEqual(h.nbins(), 10) - - def testToDict1(self): - i = ti.Input(input_file='test.root', - hist='test_hist', - ) - d = i.toDict() - self.assertEqual(d, {'input_file': 'test.root', 'hist': 'test_hist'}) - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main()