Skip to content

Commit

Permalink
Merge pull request #23609 from davidlange6/py3_iteriv2
Browse files Browse the repository at this point in the history
`itervalues` and `iteritems` replaced by `six` helpers for Python 3 compatibility
  • Loading branch information
cmsbuild committed Jul 3, 2018
2 parents 36e702b + dd762d7 commit 15c494f
Show file tree
Hide file tree
Showing 153 changed files with 554 additions and 389 deletions.
Expand Up @@ -3,6 +3,7 @@
from copy import deepcopy
import FWCore.ParameterSet.Config as cms
import FWCore.PythonUtilities.LumiList as LumiList
import six

# Helper functions
def getPSetDict(thePSet):
Expand All @@ -12,7 +13,7 @@ def insertValToPSet(name,val,thePSet):
setattr(thePSet,name,val)

def insertPSetToPSet(inPSet, outPSet):
    """Copy every (name, value) entry of ``inPSet`` into ``outPSet``.

    ``inPSet`` is viewed as a dict via ``getPSetDict`` and each entry is
    inserted into ``outPSet`` with ``insertValToPSet``.
    """
    # BUG FIX: the python3 conversion wrapped the wrong expression —
    # ``getPSetDict(six.iteritems(inPSet))`` hands an items-iterator to
    # getPSetDict, which expects the PSet itself.  Iterate over the dict
    # returned by getPSetDict instead (equivalent to the original
    # ``getPSetDict(inPSet).iteritems()``).
    for key,val in six.iteritems(getPSetDict(inPSet)):
        insertValToPSet(key,val,outPSet)

def insertPSetToVPSet(inPSet, outVPSet):
Expand Down Expand Up @@ -110,7 +111,7 @@ def parseOptions(self):

def interpretOptions(self):
gttogetpsets=[]
for key,val in self.optdict.iteritems():
for key,val in six.iteritems(self.optdict):
# Get GT name
if key=="gt":
autofind=val.find("auto")
Expand Down
5 changes: 3 additions & 2 deletions Alignment/MillePedeAlignmentAlgorithm/python/mpslib/tools.py
Expand Up @@ -8,6 +8,7 @@
import CondCore.Utilities.conddblib as conddb
from functools import reduce

import six

def create_single_iov_db(inputs, run_number, output_db):
"""Create an sqlite file with single-IOV tags for alignment payloads.
Expand All @@ -19,7 +20,7 @@ def create_single_iov_db(inputs, run_number, output_db):
"""

# find the IOV containing `run_number`
for record,tag in inputs.iteritems():
for record,tag in six.iteritems(inputs):
run_is_covered = False
for iov in reversed(tag["iovs"]):
if iov <= run_number:
Expand All @@ -37,7 +38,7 @@ def create_single_iov_db(inputs, run_number, output_db):
result = {}
remove_existing_object(output_db)

for record,tag in inputs.iteritems():
for record,tag in six.iteritems(inputs):
result[record] = {"connect": "sqlite_file:"+output_db,
"tag": "_".join([tag["tag"], tag["since"]])}

Expand Down
21 changes: 11 additions & 10 deletions Alignment/MillePedeAlignmentAlgorithm/scripts/mps_alisetup.py
Expand Up @@ -16,6 +16,7 @@
from Alignment.MillePedeAlignmentAlgorithm.alignmentsetup.helper import checked_out_MPS
from functools import reduce

import six

################################################################################
def main(argv = None):
Expand Down Expand Up @@ -258,7 +259,7 @@ def _create_mille_jobs(self):
json_regex = re.compile('setupJson\s*\=\s*.*$', re.M)

first_dataset = True
for name, dataset in self._datasets.iteritems():
for name, dataset in six.iteritems(self._datasets):
print "="*75
# Build config from template/Fill in variables
try:
Expand Down Expand Up @@ -439,7 +440,7 @@ def _create_additional_pede_jobs(self):
print "Properly set up the alignment before using the -w option."
sys.exit(1)

firstDataset = next(self._datasets.itervalues())
firstDataset = next(six.itervalues(self._datasets))
config_template = firstDataset["configTemplate"]
collection = firstDataset["collection"]

Expand Down Expand Up @@ -503,7 +504,7 @@ def _create_input_db(self):
run_number, input_db_name)

self._override_gt = ""
for record,tag in tags.iteritems():
for record,tag in six.iteritems(tags):
if self._override_gt == "":
self._override_gt \
+= ("\nimport "
Expand Down Expand Up @@ -558,13 +559,13 @@ def _check_iov_definition(self):
print self._first_run, "!=", iovs[0]
sys.exit(1)

for inp in inputs.itervalues():
for inp in six.itervalues(inputs):
inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"])

# check consistency of input with output
problematic_gt_inputs = {}
input_indices = {key: len(value["iovs"]) -1
for key,value in inputs.iteritems()}
for key,value in six.iteritems(inputs)}
for iov in reversed(iovs):
for inp in inputs:
if inputs[inp].pop("problematic", False):
Expand Down Expand Up @@ -604,7 +605,7 @@ def _check_iov_definition(self):

# check consistency of 'TrackerAlignmentRcd' with other inputs
input_indices = {key: len(value["iovs"]) -1
for key,value in inputs.iteritems()
for key,value in six.iteritems(inputs)
if (key != "TrackerAlignmentRcd")
and (inp not in problematic_gt_inputs)}
for iov in reversed(inputs["TrackerAlignmentRcd"]["iovs"]):
Expand Down Expand Up @@ -666,7 +667,7 @@ def _fetch_defaults(self):
if var == "testMode": continue
print "No '" + var + "' given in [general] section."

for dataset in self._external_datasets.itervalues():
for dataset in six.itervalues(self._external_datasets):
dataset["general"] = {}
for var in ("globaltag", "configTemplate", "json"):
try:
Expand Down Expand Up @@ -701,7 +702,7 @@ def _fetch_datasets(self):
"weight": None}
all_configs.update(self._external_datasets)

for config in all_configs.itervalues():
for config in six.itervalues(all_configs):
global_weight = "1" if config["weight"] is None else config["weight"]
if global_weight+self._config.config_path in self._common_weights:
global_weight = self._common_weights[global_weight+
Expand Down Expand Up @@ -847,8 +848,8 @@ def _fetch_datasets(self):
print "inputfilelist as the number of jobs."

# check if local weights override global weights and resolve name clashes
for weight_name, weight_values in common_weights.iteritems():
for key, weight in weight_dict.iteritems():
for weight_name, weight_values in six.iteritems(common_weights):
for key, weight in six.iteritems(weight_dict):
if any([weight_name in w for w in weight]):
self._common_weights[weight_name+config["config"].config_path] = weight_values
self._weight_dict[key] = [mps_tools.replace_factors(w,
Expand Down
Expand Up @@ -17,6 +17,7 @@

import sys

import six

mps_db = "mps.db" # the mps.db file, default value

Expand Down Expand Up @@ -64,7 +65,7 @@ def get_num_evts_per_merged_dataset(merged_datasets,num_evts_per_dataset):
`merge_datasets' for an explanation of <merged_dataset>.
"""
num_evts_per_merged_dataset = {}
for merged_dataset,datasets in merged_datasets.iteritems():
for merged_dataset,datasets in six.iteritems(merged_datasets):
num_evts = 0
for dataset in datasets:
num_evts = num_evts + num_evts_per_dataset[dataset]
Expand Down Expand Up @@ -104,7 +105,7 @@ def print_merging_scheme(merged_datasets):
of what is meant by merged dataset.
"""
print "Defining the following merged datasets:"
for merged_dataset,datasets in merged_datasets.iteritems():
for merged_dataset,datasets in six.iteritems(merged_datasets):
print "\n `"+merged_dataset+"' from:"
for dataset in datasets:
print " `"+dataset+"'"
Expand Down
Expand Up @@ -3,6 +3,7 @@
import sys
import argparse
import Alignment.MillePedeAlignmentAlgorithm.mpslib.tools as mps_tools
import six

################################################################################
def main(argv = None):
Expand Down Expand Up @@ -32,7 +33,7 @@ def main(argv = None):
["TrackerAlignmentRcd",
"TrackerSurfaceDeformationRcd",
"TrackerAlignmentErrorExtendedRcd"])
for inp in inputs.itervalues():
for inp in six.itervalues(inputs):
inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"])
mps_tools.create_single_iov_db(inputs, args.run_number, args.output_db)

Expand Down
5 changes: 3 additions & 2 deletions Alignment/MillePedeAlignmentAlgorithm/scripts/mps_update.py
Expand Up @@ -4,6 +4,7 @@
import subprocess
import Alignment.MillePedeAlignmentAlgorithm.mpslib.Mpslibclass as mpslib

import six

def fill_time_info(mps_index, status, cpu_time):
"""Fill timing info in the database for `mps_index`.
Expand Down Expand Up @@ -90,7 +91,7 @@ def fill_time_info(mps_index, status, cpu_time):
job_status[job_id] = {"status": status,
"cpu": cpu_time}

for job_id, job_info in job_status.iteritems():
for job_id, job_info in six.iteritems(job_status):
mps_index = submitted_jobs.get(job_id, -1)
# check for disabled Jobs
disabled = "DISABLED" if "DISABLED" in lib.JOBSTATUS[mps_index] else ""
Expand Down Expand Up @@ -148,7 +149,7 @@ def fill_time_info(mps_index, status, cpu_time):

################################################################################
# check for orphaned jobs
for job_id, mps_index in submitted_jobs.iteritems():
for job_id, mps_index in six.iteritems(submitted_jobs):
for status in ("SETUP", "DONE", "FETCH", "TIMEL", "SUBTD"):
if status in lib.JOBSTATUS[mps_index]:
print "Funny entry index", mps_index, " job", lib.JOBID[mps_index],
Expand Down
Expand Up @@ -5,6 +5,7 @@
import configTemplates
from helperFunctions import conddb, parsecolor, parsestyle, replaceByMap, clean_name
from TkAlExceptions import AllInOneError
import six

class Alignment(object):
condShorts = {
Expand Down Expand Up @@ -231,7 +232,7 @@ def __getConditions( self, theConfig, theSection ):
rcdnames = collections.Counter(condition["rcdName"] for condition in conditions)
if rcdnames and max(rcdnames.values()) >= 2:
raise AllInOneError("Some conditions are specified multiple times (possibly through mp or hp options)!\n"
+ ", ".join(rcdname for rcdname, count in rcdnames.iteritems() if count >= 2))
+ ", ".join(rcdname for rcdname, count in six.iteritems(rcdnames) if count >= 2))

for condition in conditions:
self.__testDbExist(condition["connectString"], condition["tagName"])
Expand Down
Expand Up @@ -3,6 +3,7 @@
import ROOT
import sys
from TkAlExceptions import AllInOneError
import six

####################--- Helpers ---############################
def replaceByMap(target, the_map):
Expand All @@ -23,7 +24,7 @@ def replaceByMap(target, the_map):
result = result.replace(".oO["+key+"]Oo.",the_map[key])
except TypeError: #try a dict
try:
for keykey, value in the_map[key].iteritems():
for keykey, value in six.iteritems(the_map[key]):
result = result.replace(".oO[" + key + "['" + keykey + "']]Oo.", value)
result = result.replace(".oO[" + key + '["' + keykey + '"]]Oo.', value)
except AttributeError: #try a list
Expand Down Expand Up @@ -152,12 +153,12 @@ def cache(function):
cache = {}
def newfunction(*args, **kwargs):
try:
return cache[args, tuple(sorted(kwargs.iteritems()))]
return cache[args, tuple(sorted(six.iteritems(kwargs)))]
except TypeError:
print args, tuple(sorted(kwargs.iteritems()))
print args, tuple(sorted(six.iteritems(kwargs)))
raise
except KeyError:
cache[args, tuple(sorted(kwargs.iteritems()))] = function(*args, **kwargs)
cache[args, tuple(sorted(six.iteritems(kwargs)))] = function(*args, **kwargs)
return newfunction(*args, **kwargs)
newfunction.__name__ = function.__name__
return newfunction
Expand Down
7 changes: 4 additions & 3 deletions Alignment/OfflineValidation/scripts/validateAlignments.py
Expand Up @@ -7,6 +7,7 @@
import shutil
import fnmatch

import six
import Alignment.OfflineValidation.TkAlAllInOneTool.configTemplates \
as configTemplates
import Alignment.OfflineValidation.TkAlAllInOneTool.crabWrapper as crabWrapper
Expand Down Expand Up @@ -325,7 +326,7 @@ def createMergeScript( path, validations, options ):
#prepare dictionary containing handle objects for parallel merge batch jobs
if options.mergeOfflineParallel:
parallelMergeObjects={}
for (validationType, referencename), validations in comparisonLists.iteritems():
for (validationType, referencename), validations in six.iteritems(comparisonLists):
for validation in validations:
#parallel merging
if (isinstance(validation, PreexistingValidation)
Expand Down Expand Up @@ -413,12 +414,12 @@ def createMergeScript( path, validations, options ):
repMap["DownloadData"] = ""

repMap["RunValidationPlots"] = ""
for (validationType, referencename), validations in comparisonLists.iteritems():
for (validationType, referencename), validations in six.iteritems(comparisonLists):
if issubclass(validationType, ValidationWithPlots):
repMap["RunValidationPlots"] += validationType.doRunPlots(validations)

repMap["CompareAlignments"] = "#run comparisons"
for (validationType, referencename), validations in comparisonLists.iteritems():
for (validationType, referencename), validations in six.iteritems(comparisonLists):
if issubclass(validationType, ValidationWithComparison):
repMap["CompareAlignments"] += validationType.doComparison(validations)

Expand Down
3 changes: 2 additions & 1 deletion CalibTracker/SiStripDCS/test/ManualO2OForRestart.py
Expand Up @@ -11,13 +11,14 @@
import datetime
import subprocess
import argparse
import six

def insert_to_file(template, target, replace_dict):
    '''Update the template file based on the replace_dict, and write to the target.

    Reads `template`, applies every key -> value substitution from
    `replace_dict` to the text, and writes the fully substituted result
    to `target`.
    '''
    with open(template, 'r') as input_file:
        config = input_file.read()
    with open(target, 'w') as output_file:
        # Apply all replacements in memory first, then write once.
        # NOTE(review): writing inside the loop would emit one partially
        # substituted copy of the file per key — confirm against upstream.
        for key, value in six.iteritems(replace_dict):
            config = config.replace(key, value)
        output_file.write(config)

Expand Down
@@ -1,14 +1,15 @@
import FWCore.ParameterSet.Config as cms

import collections
import six

def customiseEarlyDeleteForCandIsoDeposits(process, products):
# Find the producers

def _branchName(productType, moduleLabel, instanceLabel=""):
return "%s_%s_%s_%s" % (productType, moduleLabel, instanceLabel, process.name_())

for name, module in process.producers_().iteritems():
for name, module in six.iteritems(process.producers_()):
cppType = module._TypedParameterizable__type
if cppType == "CandIsoDepositProducer":
if module.ExtractorPSet.ComponentName in ["CandViewExtractor", "PFCandWithSuperClusterExtractor"] :
Expand Down
Expand Up @@ -2,6 +2,7 @@

import FWCore.ParameterSet.Config as cms
import FWCore.ParameterSet.VarParsing as VarParsing
import six

options = VarParsing.VarParsing()
options.register('connectionString',
Expand Down Expand Up @@ -159,7 +160,7 @@
if process.schedule_() is not None:
process.schedule_().append( process.esout )

for name, module in process.es_sources_().iteritems():
for name, module in six.iteritems(process.es_sources_()):
print "ESModules> provider:%s '%s'" % ( name, module.type_() )
for name, module in process.es_producers_().iteritems():
for name, module in six.iteritems(process.es_producers_()):
print "ESModules> provider:%s '%s'" % ( name, module.type_() )
Expand Up @@ -3,6 +3,7 @@
import FWCore.ParameterSet.Config as cms
import FWCore.ParameterSet.VarParsing as VarParsing
from Configuration.AlCa.autoCond import autoCond
import six

options = VarParsing.VarParsing()
options.register('connectionString',
Expand Down Expand Up @@ -155,7 +156,7 @@
if process.schedule_() is not None:
process.schedule_().append( process.esout )

for name, module in process.es_sources_().iteritems():
for name, module in six.iteritems(process.es_sources_()):
print "ESModules> provider:%s '%s'" % ( name, module.type_() )
for name, module in process.es_producers_().iteritems():
for name, module in six.iteritems(process.es_producers_()):
print "ESModules> provider:%s '%s'" % ( name, module.type_() )
3 changes: 2 additions & 1 deletion CondTools/BTau/python/checkBTagCalibrationConsistency.py
Expand Up @@ -6,6 +6,7 @@
import dataLoader
import ROOT

import six

data = None
check_flavor = True
Expand Down Expand Up @@ -136,7 +137,7 @@ def _check_sys_side(self, op, flav):
assert len(sys_dict) == len(entries)
sys_cent = sys_dict.pop('central', None)
x = discr if op == 3 else pt
for syst, e in sys_dict.iteritems():
for syst, e in six.iteritems(sys_dict):
sys_val = e.tf1_func.Eval(x)
cent_val = sys_cent.tf1_func.Eval(x)
if syst.startswith('up') and not sys_val > cent_val:
Expand Down

0 comments on commit 15c494f

Please sign in to comment.