BrokenPipeError #1

Open
yang-Michael opened this issue Mar 4, 2020 · 0 comments
yang-Michael commented Mar 4, 2020

I ran the code shown in the screenshot below, and the line highlighted in the red box raised an error. How can I solve this problem? Thanks.
[screenshot: the training script, with the failing line boxed in red]

The error message is as follows (first the traceback from the spawned worker process, then the main process traceback ending in the BrokenPipeError):
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\spawn.py", line 105, in spawn_main
exitcode = _main(fd)
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\spawn.py", line 114, in _main
prepare(preparation_data)
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\spawn.py", line 225, in prepare
_fixup_main_from_path(data['init_main_from_path'])
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\spawn.py", line 277, in _fixup_main_from_path
run_name="mp_main")
File "D:\Program Files (x86)\Anaconda3\lib\runpy.py", line 263, in run_path
pkg_name=pkg_name, script_name=fname)
File "D:\Program Files (x86)\Anaconda3\lib\runpy.py", line 96, in _run_module_code
mod_name, mod_spec, pkg_name, script_name)
File "D:\Program Files (x86)\Anaconda3\lib\runpy.py", line 85, in _run_code
exec(code, run_globals)
File "E:\bylclassLandCaver\fenlei.py", line 21, in
learn.fit_one_cycle(6, slice(lr))
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastai\train.py", line 23, in fit_one_cycle
learn.fit(cyc_len, max_lr, wd=wd, callbacks=callbacks)
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastai\basic_train.py", line 200, in fit
fit(epochs, self, metrics=self.metrics, callbacks=self.callbacks+callbacks)
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastai\basic_train.py", line 99, in fit
Traceback (most recent call last):
File "D:\Program Files (x86)\PyCharm Community Edition 2019.1.3\helpers\pydev\pydevd.py", line 1758, in
for xb,yb in progress_bar(learn.data.train_dl, parent=pbar):
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastprogress\fastprogress.py", line 47, in iter
raise e
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastprogress\fastprogress.py", line 41, in iter
for i,o in enumerate(self.gen):
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastai\basic_data.py", line 75, in iter
for b in self.dl: yield self.proc_batch(b)
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\torch\utils\data\dataloader.py", line 278, in iter
return _MultiProcessingDataLoaderIter(self)
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\torch\utils\data\dataloader.py", line 682, in init
main()
File "D:\Program Files (x86)\PyCharm Community Edition 2019.1.3\helpers\pydev\pydevd.py", line 1752, in main
w.start()
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\process.py", line 112, in start
self._popen = self._Popen(self)
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\context.py", line 223, in _Popen
return _default_context.get_context().Process._Popen(process_obj)
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\context.py", line 322, in _Popen
return Popen(process_obj)
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\popen_spawn_win32.py", line 46, in init
globals = debugger.run(setup['file'], None, None, is_module)
File "D:\Program Files (x86)\PyCharm Community Edition 2019.1.3\helpers\pydev\pydevd.py", line 1147, in run
prep_data = spawn.get_preparation_data(process_obj._name)
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\spawn.py", line 143, in get_preparation_data
_check_not_importing_main()
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\spawn.py", line 136, in _check_not_importing_main
is not going to be frozen to produce an executable.''')
RuntimeError:
An attempt has been made to start a new process before the
current process has finished its bootstrapping phase.

    This probably means that you are not using fork to start your
    child processes and you have forgotten to use the proper idiom
    in the main module:

        if __name__ == '__main__':
            freeze_support()
            ...

    The "freeze_support()" line can be omitted if the program
    is not going to be frozen to produce an executable.
pydev_imports.execfile(file, globals, locals)  # execute the script

File "D:\Program Files (x86)\PyCharm Community Edition 2019.1.3\helpers\pydev_pydev_imps_pydev_execfile.py", line 18, in execfile
exec(compile(contents+"\n", file, 'exec'), glob, loc)
File "E:/bylclassLandCaver/fenlei.py", line 21, in
learn.fit_one_cycle(6, slice(lr))
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastai\train.py", line 23, in fit_one_cycle
learn.fit(cyc_len, max_lr, wd=wd, callbacks=callbacks)
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastai\basic_train.py", line 200, in fit
fit(epochs, self, metrics=self.metrics, callbacks=self.callbacks+callbacks)
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastai\basic_train.py", line 99, in fit
for xb,yb in progress_bar(learn.data.train_dl, parent=pbar):
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastprogress\fastprogress.py", line 47, in iter
raise e
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastprogress\fastprogress.py", line 41, in iter
for i,o in enumerate(self.gen):
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\fastai\basic_data.py", line 75, in iter
for b in self.dl: yield self.proc_batch(b)
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\torch\utils\data\dataloader.py", line 278, in iter
return _MultiProcessingDataLoaderIter(self)
File "D:\Program Files (x86)\Anaconda3\lib\site-packages\torch\utils\data\dataloader.py", line 682, in init
w.start()
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\process.py", line 112, in start
self._popen = self._Popen(self)
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\context.py", line 223, in _Popen
return _default_context.get_context().Process._Popen(process_obj)
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\context.py", line 322, in _Popen
return Popen(process_obj)
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\popen_spawn_win32.py", line 89, in init
reduction.dump(process_obj, to_child)
File "D:\Program Files (x86)\Anaconda3\lib\multiprocessing\reduction.py", line 60, in dump
ForkingPickler(file, protocol).dump(obj)
BrokenPipeError: [Errno 32] Broken pipe
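
For reference, a minimal sketch of the guard that the RuntimeError above asks for, assuming fenlei.py is a fastai v1 training script run on Windows (where DataLoader workers re-import the script). Only learn.fit_one_cycle(6, slice(lr)) is taken from the traceback; the dataset path, the ImageDataBunch/cnn_learner setup, and lr are hypothetical placeholders for whatever the real script does:

```python
# fenlei.py -- restructured so the training code runs only in the main process.
# On Windows, multiprocessing uses "spawn": each DataLoader worker re-imports
# this script, so top-level training code would be executed again and crash.

from fastai.vision import *   # fastai v1, as seen in the traceback


def main():
    path = Path(r'E:\bylclassLandCaver\data')          # hypothetical dataset location
    data = ImageDataBunch.from_folder(path,
                                      valid_pct=0.2,   # illustrative split
                                      ds_tfms=get_transforms(),
                                      size=224)
    learn = cnn_learner(data, models.resnet34, metrics=accuracy)
    lr = 1e-3                                          # hypothetical learning rate
    learn.fit_one_cycle(6, slice(lr))                  # the call from the traceback


if __name__ == '__main__':
    # This guard is what the error message asks for: it prevents the training
    # code from re-running when worker processes import this module.
    main()
```

Alternatively, passing num_workers=0 when building the DataBunch should avoid spawning worker processes entirely, at the cost of slower data loading.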
