Commit e4b94aba authored by Samuel GAIST

[test][prefix][algorithms] Workaround multiprocess changes in macOS Python 3.8

The default start method for multiprocessing
changed from fork to spawn on macOS with Python
3.8. This merge request forces the use of fork
for the tests on macOS. The CI runner uses Linux
anyway, so spending more time on this particular
issue would not make much sense.
parent c75c4518
Merge request !81: Workaround multiprocess changes in macOS Python 3.8
Pipeline #48789 passed
@@ -34,6 +34,8 @@
 ###################################################################################

 import multiprocessing
+import platform
+import sys


 def foo(queue_in, queue_out, index):
@@ -64,14 +66,20 @@ class Algorithm:
         num_thread = data_loader.count()

-        queue_in = multiprocessing.JoinableQueue(num_thread)
+        version = sys.version_info
+        if platform.system() == "Darwin" and version[0] == 3 and version[1] >= 8:
+            ctx = multiprocessing.get_context("fork")
+        else:
+            ctx = multiprocessing.get_context()
+
+        queue_in = ctx.JoinableQueue(num_thread)
         queue_out = []

         # Start worker processes
         jobs = []
         for i in range(num_thread):
-            queue_out.append(multiprocessing.Queue())
-            p = multiprocessing.Process(target=foo, args=(queue_in, queue_out[i], i))
+            queue_out.append(ctx.Queue())
+            p = ctx.Process(target=foo, args=(queue_in, queue_out[i], i))
             jobs.append(p)
             p.start()
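For reference, a minimal self-contained sketch of the same workaround; the worker function, queue sizes, and variable names below are illustrative and not the project's actual code:

import multiprocessing
import platform
import sys


def worker(queue_in, queue_out, index):
    # Take one item from the shared input queue, tag it with this worker's
    # index, and report it back on the worker's own output queue.
    item = queue_in.get()
    queue_out.put((index, item))
    queue_in.task_done()


if __name__ == "__main__":
    # Python 3.8 changed the default start method on macOS from fork to
    # spawn; explicitly request fork there to keep the pre-3.8 behaviour.
    version = sys.version_info
    if platform.system() == "Darwin" and version[0] == 3 and version[1] >= 8:
        ctx = multiprocessing.get_context("fork")
    else:
        ctx = multiprocessing.get_context()

    num_workers = 2
    queue_in = ctx.JoinableQueue(num_workers)
    queues_out = [ctx.Queue() for _ in range(num_workers)]

    # Start one process per worker, each with its own output queue
    jobs = []
    for i in range(num_workers):
        p = ctx.Process(target=worker, args=(queue_in, queues_out[i], i))
        jobs.append(p)
        p.start()

    for i in range(num_workers):
        queue_in.put("item-%d" % i)

    queue_in.join()  # blocks until every item has been marked task_done()

    for q in queues_out:
        print(q.get())

    for p in jobs:
        p.join()

Forcing fork keeps the behaviour the tests relied on before Python 3.8; as the commit message notes, the CI runner uses Linux (where fork remains the default), so a spawn-safe rewrite was not considered worth the effort here.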