Skip to content

Commit

Permalink
Merge branch 'master' into pixel_fastsim_10_2_X
Browse files Browse the repository at this point in the history
  • Loading branch information
pmaksim1 committed Jul 14, 2018
2 parents a5467fa + 905af02 commit 6e9b6c3
Show file tree
Hide file tree
Showing 577 changed files with 54,151 additions and 40,788 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from copy import deepcopy
import FWCore.ParameterSet.Config as cms
import FWCore.PythonUtilities.LumiList as LumiList
import six

# Helper functions
def getPSetDict(thePSet):
Expand All @@ -12,7 +13,7 @@ def insertValToPSet(name,val,thePSet):
setattr(thePSet,name,val)

def insertPSetToPSet(inPSet, outPSet):
    """Copy every (name, value) parameter of ``inPSet`` into ``outPSet``.

    Each entry of the dict produced by ``getPSetDict`` is written onto
    ``outPSet`` via ``insertValToPSet`` (i.e. ``setattr``), so existing
    parameters with the same name are overwritten.
    """
    # BUG FIX: the migrated line called getPSetDict(six.iteritems(inPSet)),
    # handing an items-iterator to a function that expects a PSet.  The
    # iteration must run over the *dict returned by* getPSetDict(inPSet).
    for key, val in six.iteritems(getPSetDict(inPSet)):
        insertValToPSet(key, val, outPSet)

def insertPSetToVPSet(inPSet, outVPSet):
Expand Down Expand Up @@ -110,7 +111,7 @@ def parseOptions(self):

def interpretOptions(self):
gttogetpsets=[]
for key,val in self.optdict.iteritems():
for key,val in six.iteritems(self.optdict):
# Get GT name
if key=="gt":
autofind=val.find("auto")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,7 @@ class MillePedeFileReader {
1000000. }}; // tZ

bool updateDB_{false};
bool vetoUpdateDB_{false};
int Nrec_{0};

std::array<double, 6> Xobs_ = {{0.,0.,0.,0.,0.,0.}};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -181,9 +181,9 @@ ::fillExpertHistos()
maxErrortYcut_[detIndex] = myMap[alignable].getErrorThetaYcut() ;

tZcut_[detIndex] = myMap[alignable].getThetaZcut() ;
sigtZcut_[detIndex] = myMap[alignable].getSigThetaYcut() ;
maxMovetZcut_[detIndex] = myMap[alignable].getMaxMoveThetaYcut() ;
maxErrortZcut_[detIndex] = myMap[alignable].getErrorThetaYcut() ;
sigtZcut_[detIndex] = myMap[alignable].getSigThetaZcut() ;
maxMovetZcut_[detIndex] = myMap[alignable].getMaxMoveThetaZcut() ;
maxErrortZcut_[detIndex] = myMap[alignable].getErrorThetaZcut() ;

}

Expand Down
5 changes: 3 additions & 2 deletions Alignment/MillePedeAlignmentAlgorithm/python/mpslib/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import CondCore.Utilities.conddblib as conddb
from functools import reduce

import six

def create_single_iov_db(inputs, run_number, output_db):
"""Create an sqlite file with single-IOV tags for alignment payloads.
Expand All @@ -19,7 +20,7 @@ def create_single_iov_db(inputs, run_number, output_db):
"""

# find the IOV containing `run_number`
for record,tag in inputs.iteritems():
for record,tag in six.iteritems(inputs):
run_is_covered = False
for iov in reversed(tag["iovs"]):
if iov <= run_number:
Expand All @@ -37,7 +38,7 @@ def create_single_iov_db(inputs, run_number, output_db):
result = {}
remove_existing_object(output_db)

for record,tag in inputs.iteritems():
for record,tag in six.iteritems(inputs):
result[record] = {"connect": "sqlite_file:"+output_db,
"tag": "_".join([tag["tag"], tag["since"]])}

Expand Down
21 changes: 11 additions & 10 deletions Alignment/MillePedeAlignmentAlgorithm/scripts/mps_alisetup.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from Alignment.MillePedeAlignmentAlgorithm.alignmentsetup.helper import checked_out_MPS
from functools import reduce

import six

################################################################################
def main(argv = None):
Expand Down Expand Up @@ -258,7 +259,7 @@ def _create_mille_jobs(self):
json_regex = re.compile('setupJson\s*\=\s*.*$', re.M)

first_dataset = True
for name, dataset in self._datasets.iteritems():
for name, dataset in six.iteritems(self._datasets):
print "="*75
# Build config from template/Fill in variables
try:
Expand Down Expand Up @@ -439,7 +440,7 @@ def _create_additional_pede_jobs(self):
print "Properly set up the alignment before using the -w option."
sys.exit(1)

firstDataset = next(self._datasets.itervalues())
firstDataset = next(six.itervalues(self._datasets))
config_template = firstDataset["configTemplate"]
collection = firstDataset["collection"]

Expand Down Expand Up @@ -503,7 +504,7 @@ def _create_input_db(self):
run_number, input_db_name)

self._override_gt = ""
for record,tag in tags.iteritems():
for record,tag in six.iteritems(tags):
if self._override_gt == "":
self._override_gt \
+= ("\nimport "
Expand Down Expand Up @@ -558,13 +559,13 @@ def _check_iov_definition(self):
print self._first_run, "!=", iovs[0]
sys.exit(1)

for inp in inputs.itervalues():
for inp in six.itervalues(inputs):
inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"])

# check consistency of input with output
problematic_gt_inputs = {}
input_indices = {key: len(value["iovs"]) -1
for key,value in inputs.iteritems()}
for key,value in six.iteritems(inputs)}
for iov in reversed(iovs):
for inp in inputs:
if inputs[inp].pop("problematic", False):
Expand Down Expand Up @@ -604,7 +605,7 @@ def _check_iov_definition(self):

# check consistency of 'TrackerAlignmentRcd' with other inputs
input_indices = {key: len(value["iovs"]) -1
for key,value in inputs.iteritems()
for key,value in six.iteritems(inputs)
if (key != "TrackerAlignmentRcd")
and (inp not in problematic_gt_inputs)}
for iov in reversed(inputs["TrackerAlignmentRcd"]["iovs"]):
Expand Down Expand Up @@ -666,7 +667,7 @@ def _fetch_defaults(self):
if var == "testMode": continue
print "No '" + var + "' given in [general] section."

for dataset in self._external_datasets.itervalues():
for dataset in six.itervalues(self._external_datasets):
dataset["general"] = {}
for var in ("globaltag", "configTemplate", "json"):
try:
Expand Down Expand Up @@ -701,7 +702,7 @@ def _fetch_datasets(self):
"weight": None}
all_configs.update(self._external_datasets)

for config in all_configs.itervalues():
for config in six.itervalues(all_configs):
global_weight = "1" if config["weight"] is None else config["weight"]
if global_weight+self._config.config_path in self._common_weights:
global_weight = self._common_weights[global_weight+
Expand Down Expand Up @@ -847,8 +848,8 @@ def _fetch_datasets(self):
print "inputfilelist as the number of jobs."

# check if local weights override global weights and resolve name clashes
for weight_name, weight_values in common_weights.iteritems():
for key, weight in weight_dict.iteritems():
for weight_name, weight_values in six.iteritems(common_weights):
for key, weight in six.iteritems(weight_dict):
if any([weight_name in w for w in weight]):
self._common_weights[weight_name+config["config"].config_path] = weight_values
self._weight_dict[key] = [mps_tools.replace_factors(w,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

import sys

import six

mps_db = "mps.db" # the mps.db file, default value

Expand Down Expand Up @@ -64,7 +65,7 @@ def get_num_evts_per_merged_dataset(merged_datasets,num_evts_per_dataset):
`merge_datasets' for an explanation of <merged_dataset>.
"""
num_evts_per_merged_dataset = {}
for merged_dataset,datasets in merged_datasets.iteritems():
for merged_dataset,datasets in six.iteritems(merged_datasets):
num_evts = 0
for dataset in datasets:
num_evts = num_evts + num_evts_per_dataset[dataset]
Expand Down Expand Up @@ -104,7 +105,7 @@ def print_merging_scheme(merged_datasets):
of what is meant by merged dataset.
"""
print "Defining the following merged datasets:"
for merged_dataset,datasets in merged_datasets.iteritems():
for merged_dataset,datasets in six.iteritems(merged_datasets):
print "\n `"+merged_dataset+"' from:"
for dataset in datasets:
print " `"+dataset+"'"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import sys
import argparse
import Alignment.MillePedeAlignmentAlgorithm.mpslib.tools as mps_tools
import six

################################################################################
def main(argv = None):
Expand Down Expand Up @@ -32,7 +33,7 @@ def main(argv = None):
["TrackerAlignmentRcd",
"TrackerSurfaceDeformationRcd",
"TrackerAlignmentErrorExtendedRcd"])
for inp in inputs.itervalues():
for inp in six.itervalues(inputs):
inp["iovs"] = mps_tools.get_iovs(inp["connect"], inp["tag"])
mps_tools.create_single_iov_db(inputs, args.run_number, args.output_db)

Expand Down
5 changes: 3 additions & 2 deletions Alignment/MillePedeAlignmentAlgorithm/scripts/mps_update.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import subprocess
import Alignment.MillePedeAlignmentAlgorithm.mpslib.Mpslibclass as mpslib

import six

def fill_time_info(mps_index, status, cpu_time):
"""Fill timing info in the database for `mps_index`.
Expand Down Expand Up @@ -90,7 +91,7 @@ def fill_time_info(mps_index, status, cpu_time):
job_status[job_id] = {"status": status,
"cpu": cpu_time}

for job_id, job_info in job_status.iteritems():
for job_id, job_info in six.iteritems(job_status):
mps_index = submitted_jobs.get(job_id, -1)
# check for disabled Jobs
disabled = "DISABLED" if "DISABLED" in lib.JOBSTATUS[mps_index] else ""
Expand Down Expand Up @@ -148,7 +149,7 @@ def fill_time_info(mps_index, status, cpu_time):

################################################################################
# check for orphaned jobs
for job_id, mps_index in submitted_jobs.iteritems():
for job_id, mps_index in six.iteritems(submitted_jobs):
for status in ("SETUP", "DONE", "FETCH", "TIMEL", "SUBTD"):
if status in lib.JOBSTATUS[mps_index]:
print "Funny entry index", mps_index, " job", lib.JOBID[mps_index],
Expand Down
13 changes: 7 additions & 6 deletions Alignment/MillePedeAlignmentAlgorithm/src/MillePedeFileReader.cc
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ ::read() {

bool MillePedeFileReader
::storeAlignments() {
  // Store the alignment payload only when the result parsing requested an
  // update (updateDB_) AND no per-coordinate threshold raised a veto
  // (vetoUpdateDB_).  The veto flag lets the reader keep scanning all
  // coordinates (continue) instead of breaking out on the first violation.
  // NOTE(review): the rendered block contained a leftover unconditional
  // `return updateDB_;` ahead of this line, which made the veto dead code.
  return (updateDB_ && !vetoUpdateDB_);
}


Expand Down Expand Up @@ -96,6 +96,7 @@ ::readMillePedeResultFile()
}

updateDB_ = false;
vetoUpdateDB_ = false;
std::ifstream resFile;
resFile.open(millePedeResFile_.c_str());

Expand Down Expand Up @@ -175,16 +176,16 @@ ::readMillePedeResultFile()
if (std::abs(ObsMove) > thresholds_[detLabel][alignableIndex]) {
edm::LogWarning("MillePedeFileReader")<<"Aborting payload creation."
<<" Exceeding maximum thresholds for movement: "<<std::abs(ObsMove)<<" for"<< detLabel <<"("<<coord<<")" ;
updateDB_ = false;
break;
vetoUpdateDB_ = true;
continue;

} else if (std::abs(ObsMove) > cutoffs_[detLabel][alignableIndex]) {

if (std::abs(ObsErr) > errors_[detLabel][alignableIndex]) {
edm::LogWarning("MillePedeFileReader")<<"Aborting payload creation."
<<" Exceeding maximum thresholds for error: "<<std::abs(ObsErr)<<" for"<< detLabel <<"("<<coord<<")" ;
updateDB_ = false;
break;
<<" Exceeding maximum thresholds for error: "<<std::abs(ObsErr)<<" for"<< detLabel <<"("<<coord<<")" ;
vetoUpdateDB_ = true;
continue;
} else {
if (std::abs(ObsMove/ObsErr) < significances_[detLabel][alignableIndex]) {
continue;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import configTemplates
from helperFunctions import conddb, parsecolor, parsestyle, replaceByMap, clean_name
from TkAlExceptions import AllInOneError
import six

class Alignment(object):
condShorts = {
Expand Down Expand Up @@ -231,7 +232,7 @@ def __getConditions( self, theConfig, theSection ):
rcdnames = collections.Counter(condition["rcdName"] for condition in conditions)
if rcdnames and max(rcdnames.values()) >= 2:
raise AllInOneError("Some conditions are specified multiple times (possibly through mp or hp options)!\n"
+ ", ".join(rcdname for rcdname, count in rcdnames.iteritems() if count >= 2))
+ ", ".join(rcdname for rcdname, count in six.iteritems(rcdnames) if count >= 2))

for condition in conditions:
self.__testDbExist(condition["connectString"], condition["tagName"])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import ROOT
import sys
from TkAlExceptions import AllInOneError
import six

####################--- Helpers ---############################
def replaceByMap(target, the_map):
Expand All @@ -23,7 +24,7 @@ def replaceByMap(target, the_map):
result = result.replace(".oO["+key+"]Oo.",the_map[key])
except TypeError: #try a dict
try:
for keykey, value in the_map[key].iteritems():
for keykey, value in six.iteritems(the_map[key]):
result = result.replace(".oO[" + key + "['" + keykey + "']]Oo.", value)
result = result.replace(".oO[" + key + '["' + keykey + '"]]Oo.', value)
except AttributeError: #try a list
Expand Down Expand Up @@ -152,12 +153,12 @@ def cache(function):
cache = {}
def newfunction(*args, **kwargs):
try:
return cache[args, tuple(sorted(kwargs.iteritems()))]
return cache[args, tuple(sorted(six.iteritems(kwargs)))]
except TypeError:
print args, tuple(sorted(kwargs.iteritems()))
print args, tuple(sorted(six.iteritems(kwargs)))
raise
except KeyError:
cache[args, tuple(sorted(kwargs.iteritems()))] = function(*args, **kwargs)
cache[args, tuple(sorted(six.iteritems(kwargs)))] = function(*args, **kwargs)
return newfunction(*args, **kwargs)
newfunction.__name__ = function.__name__
return newfunction
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,7 @@
PVValidationScriptTemplate="""
#!/bin/bash
source /afs/cern.ch/cms/caf/setup.sh
export X509_USER_PROXY=.oO[scriptsdir]Oo./.user_proxy
echo -----------------------
echo Job started at `date`
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,7 @@
zMuMuScriptTemplate="""
#!/bin/bash
source /afs/cern.ch/cms/caf/setup.sh
export X509_USER_PROXY=.oO[scriptsdir]Oo./.user_proxy
echo -----------------------
echo Job started at `date`
Expand Down
7 changes: 4 additions & 3 deletions Alignment/OfflineValidation/scripts/validateAlignments.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import shutil
import fnmatch

import six
import Alignment.OfflineValidation.TkAlAllInOneTool.configTemplates \
as configTemplates
import Alignment.OfflineValidation.TkAlAllInOneTool.crabWrapper as crabWrapper
Expand Down Expand Up @@ -325,7 +326,7 @@ def createMergeScript( path, validations, options ):
#prepare dictionary containing handle objects for parallel merge batch jobs
if options.mergeOfflineParallel:
parallelMergeObjects={}
for (validationType, referencename), validations in comparisonLists.iteritems():
for (validationType, referencename), validations in six.iteritems(comparisonLists):
for validation in validations:
#parallel merging
if (isinstance(validation, PreexistingValidation)
Expand Down Expand Up @@ -413,12 +414,12 @@ def createMergeScript( path, validations, options ):
repMap["DownloadData"] = ""

repMap["RunValidationPlots"] = ""
for (validationType, referencename), validations in comparisonLists.iteritems():
for (validationType, referencename), validations in six.iteritems(comparisonLists):
if issubclass(validationType, ValidationWithPlots):
repMap["RunValidationPlots"] += validationType.doRunPlots(validations)

repMap["CompareAlignments"] = "#run comparisons"
for (validationType, referencename), validations in comparisonLists.iteritems():
for (validationType, referencename), validations in six.iteritems(comparisonLists):
if issubclass(validationType, ValidationWithComparison):
repMap["CompareAlignments"] += validationType.doComparison(validations)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -524,7 +524,7 @@ def update(self):
#try it once more
time.sleep(2)
self.state , self.jobs,self.failureReason = controller.status(self.crab_folder)
self.nJobs = len(self.jobs.keys())
self.nJobs = len(self.jobs)
self.updateJobStats()
if self.state == "NOSTATE":
self.log.debug( "Trying to resubmit because of NOSTATE" )
Expand Down
Loading

0 comments on commit 6e9b6c3

Please sign in to comment.