Merge branch 'multiprocessing_noasync' into develop
WolfgangWaltenberger committed May 1, 2024
2 parents 82701a2 + 197c0ab commit 5ba3bd4
Showing 2 changed files with 30 additions and 22 deletions.
3 changes: 2 additions & 1 deletion smodels/base/model.py
@@ -317,6 +317,7 @@ def setDecays(self, decaysDict, promptWidth, stableWidth, ignorePromptQNumbers):
            particle.totalwidth = abs(particleData.totalwidth)*GeV
            broadWidth = 0.01
            if particle.totalwidth > broadWidth*particle.mass:
                # Check if the particle can decay to SM only:
                logger.warning("Particle %s has a total width/mass = %1.2f. Some results may not be valid for broad resonances!" %(str(particle),float(particle.totalwidth/particle.mass)))
            if particle.totalwidth < stableWidth:
                particle._isStable = True # Treat particle as stable
@@ -376,7 +377,7 @@ def setDecays(self, decaysDict, promptWidth, stableWidth, ignorePromptQNumbers):


    def updateParticles(self, inputFile, promptWidth = None, stableWidth = None,
                        roundMasses = 1, ignorePromptQNumbers=[],
                        roundMasses = 1, ignorePromptQNumbers=[],
                        minMass=1.0*GeV):
        """
        Update mass, total width and branches of allParticles particles from input SLHA or LHE file.
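The hunk above warns when a particle's total width exceeds 1% of its mass and treats widths below stableWidth as stable. A minimal standalone sketch of that classification, with plain floats standing in for SModelS' GeV-valued quantities (the default stable_width value below is an illustrative assumption, not taken from the diff):

GeV = 1.0  # stand-in for the SModelS unit constant

def check_width(total_width, mass, stable_width=1e-25*GeV, broad_width=0.01):
    # Hedged sketch of the checks in setDecays: print the same kind of
    # broad-resonance warning, return whether the particle counts as stable.
    if total_width > broad_width * mass:
        print("WARNING: total width/mass = %1.2f. Some results may not be "
              "valid for broad resonances!" % (total_width / mass))
    return total_width < stable_width  # True -> treat particle as stable

# A 100 GeV particle with a 5 GeV width triggers the broad-resonance warning:
assert check_width(5.0 * GeV, 100.0 * GeV) is False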
49 changes: 28 additions & 21 deletions smodels/matching/modelTester.py
@@ -31,7 +31,6 @@


from collections import OrderedDict
import multiprocessing
import os
import sys
import time
@@ -267,7 +266,8 @@ def runSingleFile(inputFile, outputDir, parser, database,


def runSetOfFiles(inputFiles, outputDir, parser, database,
                  timeout, development, parameterFile):
                  timeout, development, parameterFile,
                  return_dict ):
"""
Loop over all input files in inputFiles with testPoint
@@ -280,14 +280,10 @@ def runSetOfFiles(inputFiles, outputDir, parser, database,
    :returns: printers output
    """

    output = {}
    for inputFile in inputFiles:
        output.update(runSingleFile(inputFile, outputDir, parser, database,
                                    timeout, development, parameterFile))
        gc.collect()

    return output

        tmp=runSingleFile(inputFile, outputDir, parser, database,
                          timeout, development, parameterFile)
        return_dict.update ( tmp )

def _cleanList(fileList, inDir):
""" clean up list of files """
@@ -368,9 +364,11 @@ def testPoints(fileList, inDir, outputDir, parser, database,
        logger.addHandler(fileLog)

        # Run a single process:
        outputDict = runSetOfFiles(cleanedList, outputDir, parser,
                                   database, timeout,
                                   development, parameterFile)
        outputDict = {}
        runSetOfFiles(cleanedList, outputDir, parser,
                      database, timeout,
                      development, parameterFile,
                      outputDict )
    else:
        logger.info("Running SModelS for %i files with %i processes. Messages will be redirected to smodels.log"
                    % (nFiles, ncpus))
@@ -384,14 +382,26 @@
        # Launch multiple processes.
        # Split list of files
        chunkedFiles = [cleanedList[x::ncpus] for x in range(ncpus)]
        pool = multiprocessing.Pool(processes=ncpus)
        children = []
        from multiprocessing import Process, Manager
        manager = Manager()
        outputDict = manager.dict()
        for chunkFile in chunkedFiles:
            p = pool.apply_async(runSetOfFiles, args=(chunkFile, outputDir, parser,
                                                      database, timeout,
                                                      development, parameterFile,))
            args = ( chunkFile, outputDir, parser, database, timeout,
                     development, parameterFile, outputDict )
            p = Process ( target=runSetOfFiles, args = args )
            p.start()

            children.append(p)
        pool.close()
        ctr = 0
        nsteps = 10
        for p in children:
            p.join()
            ctr+=1
            if ctr % nsteps == 0:
                t=(time.time()-t0)/60.
                logger.info ( f"{ctr} of {len(children)} processes done in {t:.2f} min" )
"""
iprint, nprint = 5, 5 # Define when to start printing and the percentage step
# Check process progress until they are all finished
while True:
@@ -407,10 +417,7 @@ def testPoints(fileList, inDir, outputDir, parser, database,
            time.sleep(2)
        logger.debug("All children terminated")

        outputDict = {}
        for p in children:
            outputDict.update(p.get())
        """

    # Collect output to build global summary:
    scanSummaryFile = os.path.join(outputDir, 'summary.txt')
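The parallel path above splits the cleaned file list into ncpus strided chunks, starts one Process per chunk, and logs progress as the children are joined. A self-contained sketch of that flow, with a simplified worker and print standing in for logger.info (names like worker are illustrative):

import time
from multiprocessing import Manager, Process

def worker(chunk, return_dict):
    for item in chunk:
        return_dict[item] = item * item  # stand-in for runSingleFile

if __name__ == "__main__":
    items, ncpus = list(range(20)), 4
    # strided split, as in the diff: chunk x gets items x, x+ncpus, x+2*ncpus, ...
    chunks = [items[x::ncpus] for x in range(ncpus)]
    manager = Manager()
    results = manager.dict()
    t0 = time.time()
    children = []
    for chunk in chunks:
        p = Process(target=worker, args=(chunk, results))
        p.start()
        children.append(p)
    nsteps = 2  # report every nsteps completed children
    for ctr, p in enumerate(children, start=1):
        p.join()
        if ctr % nsteps == 0:
            t = (time.time() - t0) / 60.0
            print(f"{ctr} of {len(children)} processes done in {t:.2f} min")
    assert len(results) == len(items)

Joining in launch order gives only a coarse progress signal (a long-running first child delays all reports), but it keeps the bookkeeping trivial compared with the polling loop that the commented-out Pool-based code used.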
