Merged

Changes from 1 commit
23 changes: 19 additions & 4 deletions python/paddle/reader/decorator.py
@@ -32,6 +32,21 @@
 import zlib
 import paddle.compat as cpt
 
+# On macOS, 'spawn' became the default multiprocessing start method in Python 3.8.
+# Paddle is currently unable to work with 'spawn', so we force subprocesses to
+# start with the 'fork' start method.
+#
+# TODO: This workaround is not ideal, because the fork start method can lead to
+# crashes of the subprocess. Figure out how to make 'spawn' work.
+#
+# For more details, please refer to
+# https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
+# https://bugs.python.org/issue33725
+if sys.version_info >= (3, ):
Member:

If this only fails for Python 3.8, could we only use fork when the version_info refers to Python 3.8?

Contributor Author:

Thanks, done.
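For illustration only, a minimal sketch of the narrowing the reviewer suggests, assuming the failure is specific to Python 3.8 and later (the diff in this commit still applies fork to every Python 3 version; the exact follow-up change is not shown on this page):

import multiprocessing
import sys

# Hypothetical sketch: only force 'fork' where 'spawn' became the macOS
# default (Python 3.8+); earlier interpreters keep the module-level API.
# Note that the 'fork' start method is not available on Windows.
if sys.version_info >= (3, 8):
    fork_context = multiprocessing.get_context('fork')
else:
    fork_context = multiprocessing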

+    fork_context = multiprocessing.get_context('fork')
+else:
+    fork_context = multiprocessing
 
 
 def cache(reader):
     """
@@ -560,9 +575,9 @@ def _read_into_queue(reader, queue):
             six.reraise(*sys.exc_info())
 
     def queue_reader():
-        queue = multiprocessing.Queue(queue_size)
+        queue = fork_context.Queue(queue_size)
         for reader in readers:
-            p = multiprocessing.Process(
+            p = fork_context.Process(
                 target=_read_into_queue, args=(reader, queue))
             p.start()
 
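For context, the surrounding multiprocess_reader (largely collapsed in this diff) uses a fan-in pattern: each reader runs in its own process and pushes samples into one shared queue, and each worker puts a None sentinel when it finishes. A simplified sketch of that pattern, assuming fork_context as defined above; names other than fork_context are illustrative, not Paddle's, and the error handling visible above (six.reraise) is omitted:

def simple_queue_reader(readers, queue_size=1000):
    # Illustrative: merge several sample generators through one shared queue.
    def _read_into_queue(reader, queue):
        for sample in reader():
            queue.put(sample)
        queue.put(None)  # sentinel: this worker is done

    queue = fork_context.Queue(queue_size)
    for reader in readers:
        p = fork_context.Process(
            target=_read_into_queue, args=(reader, queue))
        p.start()

    finished = 0
    while finished < len(readers):
        sample = queue.get()
        if sample is None:
            finished += 1
        else:
            yield sample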
@@ -593,9 +608,9 @@ def _read_into_pipe(reader, conn):
     def pipe_reader():
         conns = []
         for reader in readers:
-            parent_conn, child_conn = multiprocessing.Pipe()
+            parent_conn, child_conn = fork_context.Pipe()
             conns.append(parent_conn)
-            p = multiprocessing.Process(
+            p = fork_context.Process(
                 target=_read_into_pipe, args=(reader, child_conn))
             p.start()
 
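The pipe variant gives each worker its own one-way connection instead of a shared queue: the parent keeps a list of parent-side connections, reads from them in turn, and drops a connection once its worker sends a sentinel. A simplified sketch under the same assumptions as above (Paddle's real pipe_reader also serializes samples and handles worker errors, which this omits):

def simple_pipe_reader(readers):
    # Illustrative: merge several sample generators via per-worker pipes.
    def _read_into_pipe(reader, conn):
        for sample in reader():
            conn.send(sample)
        conn.send(None)  # sentinel: this worker is done
        conn.close()

    conns = []
    for reader in readers:
        parent_conn, child_conn = fork_context.Pipe()
        conns.append(parent_conn)
        p = fork_context.Process(
            target=_read_into_pipe, args=(reader, child_conn))
        p.start()

    while conns:
        for conn in list(conns):
            sample = conn.recv()
            if sample is None:
                conn.close()
                conns.remove(conn)
            else:
                yield sample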