Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

hooks: improve multiprocessing hook to work with pytorch #2382

Merged
merged 2 commits into from
May 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 15 additions & 8 deletions cx_Freeze/hooks/multiprocessing.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,9 @@ def load_multiprocessing(_, module: Module) -> None:
"""
# Support for:
# - fork in Unix (including macOS) is native;
# - spawn in Windows is native (since 4.3.4) but was improved in v6.2;
# - spawn and forkserver in Unix is implemented here.
# - spawn in Windows is native since 4.3.4, but was improved in 6.2;
# - spawn and forkserver in Unix is implemented here in 6.15.4 #1956;
# - monkeypatch get_context to do automatic freeze_support in 7.1 #2382;
if IS_MINGW or IS_WINDOWS:
return
if module.file.suffix == ".pyc": # source unavailable
Expand All @@ -40,12 +41,18 @@ def load_multiprocessing(_, module: Module) -> None:
if re.search(r"^from multiprocessing.* import main.*", cmd):
exec(cmd)
sys.exit()
# workaround for python docs: run the freeze_support to avoid infinite loop
from multiprocessing.spawn import freeze_support as spawn_freeze_support
spawn_freeze_support()
del spawn_freeze_support
# disable it, cannot run twice
freeze_support = lambda: None
# workaround: inject freeze_support call to avoid an infinite loop
from multiprocessing.spawn import freeze_support as _spawn_freeze_support
from multiprocessing.context import BaseContext
BaseContext._get_context = BaseContext.get_context
def _get_freeze_context(self, method=None):
ctx = self._get_context(method)
_spawn_freeze_support()
return ctx
BaseContext.get_context = \
lambda self, method=None: _get_freeze_context(self, method)
# disable freeze_support, because it cannot be run twice
BaseContext.freeze_support = lambda self: None
# cx_Freeze patch end
"""
code_string = module.file.read_text(encoding="utf_8") + dedent(source)
Expand Down
34 changes: 34 additions & 0 deletions doc/src/faq.rst
Original file line number Diff line number Diff line change
Expand Up @@ -179,3 +179,37 @@ sources.

Or install patchelf from `sources
<https://github.com/NixOS/patchelf#compiling-and-testing>`_.


Multiprocessing support
-----------------------

On Linux and macOS, multiprocessing support is automatically managed by
cx_Freeze, including support for PyTorch.

However, to produce a Windows executable, you must use
``multiprocessing.freeze_support()``.

You need to call this function immediately after the
``if __name__ == "__main__"`` line of the main module. For example:

.. code-block:: python

from multiprocessing import Process, freeze_support


def f():
print("Hello from cx_Freeze")


if __name__ == "__main__":
freeze_support()
Process(target=f).start()

If the ``freeze_support()`` line is omitted, then trying to run the frozen
executable will raise a ``RuntimeError``.

Calling ``freeze_support()`` has no effect when invoked on any operating
system other than Windows. In addition, if the module is being run normally
by the Python interpreter on Windows (the program has not been frozen), then
``freeze_support()`` has no effect.
11 changes: 0 additions & 11 deletions source/bases/common.c
Original file line number Diff line number Diff line change
Expand Up @@ -78,13 +78,10 @@ static wchar_t *get_sys_path(wchar_t *lib_dir)
wcscpy(filename, lib_dir);
wcscat(filename, L"\\");
wcscat(filename, L"library.dat");
fprintf(stderr, "--filename: %ls\n", filename);
fprintf(stderr, "--MAXPATHLEN: %zd/%zd\n", wcslen(filename), (size_t)MAXPATHLEN);
if ((fp = _wfopen(filename, L"rt")) != NULL) {
int i = fread(buffer, sizeof(*buffer), sizeof(buffer), fp);
buffer[i] = 0;
fclose(fp);
fprintf(stderr, "--buffer: %s=%zd\n", buffer, strlen(buffer));
wbuffer = Py_DecodeLocale(buffer, NULL);
if (!wbuffer) {
FatalError("Unable to convert path to string!");
Expand All @@ -93,23 +90,18 @@ static wchar_t *get_sys_path(wchar_t *lib_dir)
wcscat(filename, L"\\");
wcscat(filename, wbuffer);
PyMem_RawFree(wbuffer);
fprintf(stderr, "--filename: %ls=%zd\n", filename, wcslen(filename));
wcscat(buf_path, filename);
fprintf(stderr, "--buf_path: %ls\n", buf_path);
}
}
if (wcslen(buf_path) != 0)
wcscat(buf_path, L";");
fprintf(stderr, "--buf_path: %ls\n", buf_path);
wcscat(buf_path, lib_dir);
fprintf(stderr, "--buf_path: %ls\n", buf_path);

sys_path = PyMem_RawMalloc(sizeof(wchar_t) * wcslen(buf_path) + 1);
if (!sys_path) {
FatalError("Out of memory creating sys_path!");
return NULL;
}
fprintf(stderr, "--sys_path: %ls\n", wcscpy(sys_path, buf_path));
return wcscpy(sys_path, buf_path);
}
#else
Expand Down Expand Up @@ -207,16 +199,13 @@ static char *get_sys_path(char *lib_dir)
strcpy(filename, lib_dir);
strcat(filename, "/");
strcat(filename, "library.dat");
fprintf(stderr, "--filename: %s\n", filename);
fprintf(stderr, "--MAXPATHLEN: %d\n", MAXPATHLEN);
if ((fp = fopen(filename, "r")) != NULL) {
int i = fread(buffer, sizeof(*buffer), sizeof(buffer), fp);
buffer[i] = 0;
fclose(fp);
strcpy(filename, lib_dir);
strcat(filename, "/");
strcat(filename, buffer);
fprintf(stderr, "--filename: %s\n", filename);
strcat(buf_path, filename);
}
if (strlen(buf_path) != 0)
Expand Down