Commit 7efee66

Merge branch 'master' into optimize-finecalo-cleanedup

tklijnsma committed Jul 22, 2021
2 parents 03b7fbb + a880860
Showing 1,222 changed files with 261,449 additions and 235,245 deletions.
@@ -4,7 +4,6 @@
from copy import deepcopy
import FWCore.ParameterSet.Config as cms
import FWCore.PythonUtilities.LumiList as LumiList
import six

# Helper functions
def getPSetDict(thePSet):
@@ -14,7 +13,7 @@ def insertValToPSet(name,val,thePSet):
setattr(thePSet,name,val)

def insertPSetToPSet(inPSet, outPSet):
for key,val in getPSetDict(six.iteritems(inPSet)):
for key,val in getPSetDict(inPSet.items()):
insertValToPSet(key,val,outPSet)

def insertPSetToVPSet(inPSet, outVPSet):
@@ -112,7 +111,7 @@ def parseOptions(self):

def interpretOptions(self):
gttogetpsets=[]
for key,val in six.iteritems(self.optdict):
for key,val in self.optdict.items():
# Get GT name
if key=="gt":
autofind=val.find("auto")
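The change running through this whole commit is the removal of the `six` compatibility layer: `six.iteritems(d)` returned an iterator over a dict's items on both Python 2 and 3, while in Python 3 `d.items()` is already a lightweight view, so the helper is redundant. A minimal sketch of the two spellings, using a made-up parameter dict rather than anything from the file above:

# Hypothetical dict standing in for an option/parameter mapping.
params = {"gt": "auto:run2_data", "maxEvents": "100"}

# Old, Python 2/3-portable form (needs the external six package):
#     import six
#     for key, val in six.iteritems(params):
#         ...

# New, Python 3-only form adopted throughout this commit:
for key, val in params.items():
    print(key, val)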
5 changes: 2 additions & 3 deletions Alignment/MillePedeAlignmentAlgorithm/python/mpslib/tools.py
@@ -10,7 +10,6 @@
import CondCore.Utilities.conddblib as conddb
from functools import reduce

import six

def create_single_iov_db(inputs, run_number, output_db):
"""Create an sqlite file with single-IOV tags for alignment payloads.
@@ -22,7 +21,7 @@ def create_single_iov_db(inputs, run_number, output_db):
"""

# find the IOV containing `run_number`
for record,tag in six.iteritems(inputs):
for record,tag in inputs.items():
run_is_covered = False
for iov in reversed(tag["iovs"]):
if iov <= run_number:
@@ -40,7 +39,7 @@ def create_single_iov_db(inputs, run_number, output_db):
result = {}
remove_existing_object(output_db)

for record,tag in six.iteritems(inputs):
for record,tag in inputs.items():
result[record] = {"connect": "sqlite_file:"+output_db,
"tag": "_".join([tag["tag"], tag["since"]])}

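For context on the `create_single_iov_db` hunks in tools.py above: the loop walks each tag's IOV list from the newest entry backwards and keeps the first IOV that starts at or before the requested run. A self-contained sketch of that lookup, with made-up record names and IOV boundaries rather than values read from a real conditions database:

# Hypothetical inputs; the real dict is built from the conditions DB.
inputs = {
    "TrackerAlignmentRcd": {"tag": "Alignments_v1", "iovs": [1, 250000, 300000]},
    "TrackerSurfaceDeformationRcd": {"tag": "Deformations_v1", "iovs": [1]},
}
run_number = 273000

for record, tag in inputs.items():
    # scan from the newest IOV backwards; the first one <= run_number covers the run
    for iov in reversed(tag["iovs"]):
        if iov <= run_number:
            tag["since"] = str(iov)
            break
    else:
        raise RuntimeError("run %d not covered by tag %s" % (run_number, tag["tag"]))
    print(record, "->", tag["tag"], "since", tag["since"])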
21 changes: 10 additions & 11 deletions Alignment/MillePedeAlignmentAlgorithm/scripts/mps_alisetup.py
@@ -20,7 +20,6 @@
from Alignment.MillePedeAlignmentAlgorithm.alignmentsetup.helper import checked_out_MPS
from functools import reduce

import six

################################################################################
def main(argv = None):
@@ -262,7 +261,7 @@ def _create_mille_jobs(self):
json_regex = re.compile('setupJson\s*\=\s*.*$', re.M)

first_dataset = True
for name, dataset in six.iteritems(self._datasets):
for name, dataset in self._datasets.items():
print("="*75)
# Build config from template/Fill in variables
try:
@@ -453,7 +452,7 @@ def _create_additional_pede_jobs(self):
print("Properly set up the alignment before using the -w option.")
sys.exit(1)

firstDataset = next(six.itervalues(self._datasets))
firstDataset = next(iter(self._datasets.values()))
config_template = firstDataset["configTemplate"]
collection = firstDataset["collection"]

@@ -517,7 +516,7 @@ def _create_input_db(self):
run_number, input_db_name)

self._override_gt = ""
for record,tag in six.iteritems(tags):
for record,tag in tags.items():
if self._override_gt == "":
self._override_gt \
+= ("\nimport "
@@ -572,13 +571,13 @@ def _check_iov_definition(self):
print(self._first_run, "!=", iovs[0])
sys.exit(1)

for inp in six.itervalues(inputs):
for inp in inputs.values():
inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"])

# check consistency of input with output
problematic_gt_inputs = {}
input_indices = {key: len(value["iovs"]) -1
for key,value in six.iteritems(inputs)}
for key,value in inputs.items()}
for iov in reversed(iovs):
for inp in inputs:
if inputs[inp].pop("problematic", False):
@@ -618,7 +617,7 @@ def _check_iov_definition(self):

# check consistency of 'TrackerAlignmentRcd' with other inputs
input_indices = {key: len(value["iovs"]) -1
for key,value in six.iteritems(inputs)
for key,value in inputs.items()
if (key != "TrackerAlignmentRcd")
and (inp not in problematic_gt_inputs)}
for iov in reversed(inputs["TrackerAlignmentRcd"]["iovs"]):
@@ -680,7 +679,7 @@ def _fetch_defaults(self):
if var == "testMode": continue
print("No '" + var + "' given in [general] section.")

for dataset in six.itervalues(self._external_datasets):
for dataset in self._external_datasets.values():
dataset["general"] = {}
for var in ("globaltag", "configTemplate", "json"):
try:
@@ -715,7 +714,7 @@ def _fetch_datasets(self):
"weight": None}
all_configs.update(self._external_datasets)

for config in six.itervalues(all_configs):
for config in all_configs.values():
global_weight = "1" if config["weight"] is None else config["weight"]
if global_weight+self._config.config_path in self._common_weights:
global_weight = self._common_weights[global_weight+
@@ -865,8 +864,8 @@ def _fetch_datasets(self):
print("inputfilelist as the number of jobs.")

# check if local weights override global weights and resolve name clashes
for weight_name, weight_values in six.iteritems(common_weights):
for key, weight in six.iteritems(weight_dict):
for weight_name, weight_values in common_weights.items():
for key, weight in weight_dict.items():
if any([weight_name in w for w in weight]):
self._common_weights[weight_name+config["config"].config_path] = weight_values
self._weight_dict[key] = [mps_tools.replace_factors(w,
2 changes: 1 addition & 1 deletion Alignment/MillePedeAlignmentAlgorithm/scripts/mps_check.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
#
# This script checks outputs from jobs that have FETCH status and updates if errors occurred
# -> check STDOUT files
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

from __future__ import print_function

2 changes: 1 addition & 1 deletion Alignment/MillePedeAlignmentAlgorithm/scripts/mps_fetch.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
#
#
# Fetch jobs that have DONE status
2 changes: 1 addition & 1 deletion Alignment/MillePedeAlignmentAlgorithm/scripts/mps_fire.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# Submit jobs that are setup in local mps database to batch system
#
# The bsub syntax: bsub -J 'jobname' -q 'queue name' theProgram
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

""" Print the total number of events processed by the mille jobs per dataset
@@ -18,7 +18,6 @@

import sys

import six

mps_db = "mps.db" # the mps.db file, default value

@@ -66,7 +65,7 @@ def get_num_evts_per_merged_dataset(merged_datasets,num_evts_per_dataset):
`merge_datasets' for an explanation of <merged_dataset>.
"""
num_evts_per_merged_dataset = {}
for merged_dataset,datasets in six.iteritems(merged_datasets):
for merged_dataset,datasets in merged_datasets.items():
num_evts = 0
for dataset in datasets:
num_evts = num_evts + num_evts_per_dataset[dataset]
@@ -106,7 +105,7 @@ def print_merging_scheme(merged_datasets):
of what is meant by merged dataset.
"""
print("Defining the following merged datasets:")
for merged_dataset,datasets in six.iteritems(merged_datasets):
for merged_dataset,datasets in merged_datasets.items():
print("\n `"+merged_dataset+"' from:")
for dataset in datasets:
print(" `"+dataset+"'")
2 changes: 1 addition & 1 deletion Alignment/MillePedeAlignmentAlgorithm/scripts/mps_merge.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
#
# produce cfg file for merging run
#
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

from __future__ import print_function
from builtins import range
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

# Original author: Joerg Behr
# Translation from Perl to Python: Gregor Mittag
@@ -3,7 +3,6 @@
import sys
import argparse
import Alignment.MillePedeAlignmentAlgorithm.mpslib.tools as mps_tools
import six

################################################################################
def main(argv = None):
@@ -33,7 +32,7 @@ def main(argv = None):
["TrackerAlignmentRcd",
"TrackerSurfaceDeformationRcd",
"TrackerAlignmentErrorExtendedRcd"])
for inp in six.itervalues(inputs):
for inp in inputs.values():
inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"])
mps_tools.create_single_iov_db(inputs, args.run_number, args.output_db)

2 changes: 1 addition & 1 deletion Alignment/MillePedeAlignmentAlgorithm/scripts/mps_setup.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

from __future__ import print_function
from builtins import range
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

from __future__ import print_function
import os
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
from __future__ import print_function
from builtins import range
import re
2 changes: 1 addition & 1 deletion Alignment/MillePedeAlignmentAlgorithm/scripts/mps_stat.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
import Alignment.MillePedeAlignmentAlgorithm.mpslib.Mpslibclass as mpslib
import os

7 changes: 3 additions & 4 deletions Alignment/MillePedeAlignmentAlgorithm/scripts/mps_update.py
@@ -1,12 +1,11 @@
#!/usr/bin/env python
#!/usr/bin/env python3
from __future__ import print_function
from builtins import range
import os
import re
import subprocess
import Alignment.MillePedeAlignmentAlgorithm.mpslib.Mpslibclass as mpslib

import six

def fill_time_info(mps_index, status, cpu_time):
"""Fill timing info in the database for `mps_index`.
@@ -70,7 +69,7 @@ def fill_time_info(mps_index, status, cpu_time):
job_status[job_id] = {"status": htcondor_jobstatus[status],
"cpu": float(cpu_time)}

for job_id, job_info in six.iteritems(job_status):
for job_id, job_info in job_status.items():
mps_index = submitted_jobs.get(job_id, -1)
# check for disabled Jobs
disabled = "DISABLED" if "DISABLED" in lib.JOBSTATUS[mps_index] else ""
@@ -118,7 +117,7 @@ def fill_time_info(mps_index, status, cpu_time):

################################################################################
# check for orphaned jobs
for job_id, mps_index in six.iteritems(submitted_jobs):
for job_id, mps_index in submitted_jobs.items():
for status in ("SETUP", "DONE", "FETCH", "TIMEL", "SUBTD"):
if status in lib.JOBSTATUS[mps_index]:
print("Funny entry index", mps_index, " job", lib.JOBID[mps_index], end=' ')
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

##########################################################################
# Create histograms out of treeFile_merge.root . The pede.dump.gz file is
@@ -6,7 +6,6 @@
from . import configTemplates
from .helperFunctions import parsecolor, parsestyle, replaceByMap, clean_name, getTagsMap
from .TkAlExceptions import AllInOneError
import six

class Alignment(object):
condShorts = {
@@ -246,7 +245,7 @@ def __getConditions( self, theConfig, theSection ):
rcdnames = collections.Counter(condition["rcdName"] for condition in conditions)
if rcdnames and max(rcdnames.values()) >= 2:
raise AllInOneError("Some conditions are specified multiple times (possibly through mp or hp options)!\n"
+ ", ".join(rcdname for rcdname, count in six.iteritems(rcdnames) if count >= 2))
+ ", ".join(rcdname for rcdname, count in rcdnames.items() if count >= 2))

for condition in conditions:
self.__testDbExist(condition["connectString"], condition["tagName"])
@@ -10,7 +10,6 @@
from .dataset import Dataset
from .helperFunctions import replaceByMap, addIndex, getCommandOutput2, boolfromstring, pythonboolstring
from .TkAlExceptions import AllInOneError
from six import with_metaclass

class ValidationMetaClass(ABCMeta):
sets = ["mandatories", "optionals", "needpackages"]
@@ -39,7 +38,7 @@ def __new__(cls, clsname, bases, dct):

return super(ValidationMetaClass, cls).__new__(cls, clsname, bases, dct)

class GenericValidation(with_metaclass(ValidationMetaClass,object)):
class GenericValidation(object, metaclass=ValidationMetaClass):
defaultReferenceName = "DEFAULT"
mandatories = set()
defaults = {
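The `GenericValidation` hunk above swaps `six.with_metaclass` for the native Python 3 `metaclass=` keyword; both attach the same metaclass, but the native spelling avoids the synthetic intermediate base class that `with_metaclass` inserts. A small, self-contained illustration with a hypothetical metaclass, not the actual `ValidationMetaClass`:

class RegisteringMeta(type):
    """Toy metaclass that records the name of every class built with it."""
    registry = []

    def __new__(mcs, clsname, bases, dct):
        cls = super().__new__(mcs, clsname, bases, dct)
        RegisteringMeta.registry.append(clsname)
        return cls

# Old, six-based spelling:
#     from six import with_metaclass
#     class BaseValidation(with_metaclass(RegisteringMeta, object)): ...

# New, native Python 3 spelling used in this commit:
class BaseValidation(object, metaclass=RegisteringMeta):
    pass

print(RegisteringMeta.registry)   # ['BaseValidation']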
@@ -7,7 +7,6 @@
import sys
from .TkAlExceptions import AllInOneError
import CondCore.Utilities.conddblib as conddblib
import six

####################--- Helpers ---############################
def replaceByMap(target, the_map):
@@ -28,7 +27,7 @@ def replaceByMap(target, the_map):
result = result.replace(".oO["+key+"]Oo.",the_map[key])
except TypeError: #try a dict
try:
for keykey, value in six.iteritems(the_map[key]):
for keykey, value in the_map[key].items():
result = result.replace(".oO[" + key + "['" + keykey + "']]Oo.", value)
result = result.replace(".oO[" + key + '["' + keykey + '"]]Oo.', value)
except AttributeError: #try a list
@@ -157,12 +156,12 @@ def cache(function):
cache = {}
def newfunction(*args, **kwargs):
try:
return cache[args, tuple(sorted(six.iteritems(kwargs)))]
return cache[args, tuple(sorted(kwargs.items()))]
except TypeError:
print(args, tuple(sorted(six.iteritems(kwargs))))
print(args, tuple(sorted(kwargs.items())))
raise
except KeyError:
cache[args, tuple(sorted(six.iteritems(kwargs)))] = function(*args, **kwargs)
cache[args, tuple(sorted(kwargs.items()))] = function(*args, **kwargs)
return newfunction(*args, **kwargs)
newfunction.__name__ = function.__name__
return newfunction
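The last hunk above touches a small memoization decorator, `cache`: its cache key is the positional arguments plus `tuple(sorted(kwargs.items()))`, and sorting makes the key independent of keyword-argument order. A stripped-down sketch of the same idea; the decorator and example function names are illustrative, not the file's actual API:

import functools

def memoize(function):
    """Cache results keyed on args plus order-independent kwargs."""
    results = {}

    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        # sorted(...) makes f(a=1, b=2) and f(b=2, a=1) share one cache entry
        key = (args, tuple(sorted(kwargs.items())))
        if key not in results:
            results[key] = function(*args, **kwargs)
        return results[key]

    return wrapper

@memoize
def lookup(run, tag="express"):
    print("computing", run, tag)   # only printed on a cache miss
    return (run, tag)

lookup(run=315252, tag="express")   # computes and caches
lookup(tag="express", run=315252)   # same sorted key -> served from the cache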
@@ -15,9 +15,8 @@
from .trackSplittingValidation import TrackSplittingValidation
from .zMuMuValidation import ZMuMuValidation
from .overlapValidation import OverlapValidation
from six import with_metaclass

class BasePlottingOptions(with_metaclass(ValidationMetaClass,object)):
class BasePlottingOptions(object, metaclass=ValidationMetaClass):
defaults = {
"cmssw" : os.environ["CMSSW_BASE"],
"publicationstatus" : "",
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
'''
Submits per run Primary Vertex Resolution Alignment validation using the split vertex method,
usage:
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

'''Script that submits CMS Tracker Alignment Primary Vertex Validation workflows,
usage: