[python] Fix multiprocessing pool on Python 3.8
Processes are now spawned instead of forked, which can cause lockups and
lost processes
salkinium committed Jan 7, 2023
1 parent 0046caf commit b7ef1ce
Showing 2 changed files with 13 additions and 0 deletions.
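For background on the commit message above: starting with Python 3.8 the default start method on macOS is "spawn" rather than "fork", which is what the two files below work around. A minimal sketch of requesting a fork-based pool explicitly, not part of this commit, where the worker function work is a hypothetical placeholder:

import multiprocessing
import platform

def work(n):
    # hypothetical placeholder worker; any picklable top-level function works
    return n * n

if __name__ == "__main__":
    # Python 3.8 changed the default start method on macOS from "fork" to "spawn";
    # "fork" is only available on Unix-like systems, so fall back to "spawn" on Windows.
    method = "fork" if platform.system() != "Windows" else "spawn"
    ctx = multiprocessing.get_context(method)
    with ctx.Pool(4) as pool:
        print(pool.map(work, range(8)))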
5 changes: 5 additions & 0 deletions test/all/run_all.py
@@ -20,11 +20,16 @@
import random
import tempfile
import argparse
import platform
import subprocess
import multiprocessing
from pathlib import Path
from collections import defaultdict

# Set the process create method to fork rather than spawn
if platform.system() != "Windows":
    _ = multiprocessing.get_context("fork")

def repopath(path):
    return Path(__file__).absolute().parents[2] / path

8 changes: 8 additions & 0 deletions tools/scripts/examples_compile.py
@@ -16,6 +16,11 @@
import multiprocessing
from pathlib import Path

# Set the process create method to fork rather than spawn
if platform.system() != "Windows":
    _ = multiprocessing.get_context("fork")


is_running_in_ci = (os.getenv("CIRCLECI") is not None or
os.getenv("TRAVIS") is not None or
os.getenv("GITHUB_ACTIONS") is not None)
@@ -110,20 +115,23 @@ def compile_examples(paths, jobs, split, part):
    # first generate all projects
    with multiprocessing.Pool(jobs) as pool:
        projects = pool.map(generate, projects)
    # projects = [generate(p) for p in projects]
    results += projects.count(None)

    # Filter projects for successful generation
    projects = [p for p in projects if p is not None]
    # Then build the successfully generated ones
    with multiprocessing.Pool(jobs) as pool:
        projects = pool.map(build, projects)
    # projects = [build(p) for p in projects]
    results += projects.count(None)

    # Filter projects for successful compilation and runnability
    projects = [p for p in projects if p is not None and "CI: run" in p.read_text()]
    # Then run the successfully compiled ones
    with multiprocessing.Pool(jobs) as pool:
        projects = pool.map(run, projects)
    # projects = [run(p) for p in projects]
    results += projects.count(None)

    return results
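The staged pattern in compile_examples above (map a stage over all projects, count the None failures, filter, then map the next stage) can also be driven from a fork context's pool. A minimal sketch under that assumption, using hypothetical generate and build stages that return None on failure:

import multiprocessing

def generate(p):
    # hypothetical stage: succeed for even inputs, fail (None) for odd ones
    return p if p % 2 == 0 else None

def build(p):
    # hypothetical stage: always succeeds
    return p * 10

if __name__ == "__main__":
    ctx = multiprocessing.get_context("fork")  # Unix-like systems only
    projects = list(range(6))
    failures = 0
    with ctx.Pool(2) as pool:
        projects = pool.map(generate, projects)
    failures += projects.count(None)
    projects = [p for p in projects if p is not None]  # keep only successful results
    with ctx.Pool(2) as pool:
        projects = pool.map(build, projects)
    failures += projects.count(None)
    print(projects, "failures:", failures)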
