Fix deadlock

allegroai 2022-07-28 18:51:48 +03:00
parent 301d68d2d3
commit 969e095478
3 changed files with 24 additions and 12 deletions

View File

@@ -1,10 +1,10 @@
 import logging
 import sys
-import threading
 from time import time
 
 from ..binding.frameworks import _patched_call  # noqa
 from ..config import running_remotely, config, DEBUG_SIMULATE_REMOTE_TASK
+from ..utilities.process.mp import ForkSafeRLock
 
 
 class StdStreamPatch(object):
@@ -176,8 +176,8 @@ class PrintPatchLogger(object):
     Used for capturing and logging stdin and stderr when running in development mode pseudo worker.
     """
     patched = False
-    lock = threading.Lock()
-    recursion_protect_lock = threading.RLock()
+    lock = ForkSafeRLock()
+    recursion_protect_lock = ForkSafeRLock()
     cr_flush_period = None
 
     def __init__(self, stream, logger=None, level=logging.INFO, load_config_defaults=True):

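The first file replaces PrintPatchLogger's class-level threading.Lock/RLock with ForkSafeRLock. A plain lock that happens to be held at the moment the process forks is inherited by the child in the locked state with no thread left to release it, so the child's next acquire blocks forever, which is the deadlock named in the commit title. A minimal sketch of the idea, assuming only the _create()/_sync structure visible in the third file's hunk; this is not ClearML's actual implementation:

import os
import threading


class ForkSafeRLockSketch(object):
    """Re-entrant lock that survives os.fork() by re-creating the underlying
    RLock the first time it is touched inside a child process."""

    def __init__(self):
        self._pid = os.getpid()
        self._sync = threading.RLock()

    def _create(self):
        # After a fork the child inherits a copy of the parent's lock, possibly
        # in the locked state with no owner left to release it; detect the new
        # PID and start from a fresh, unlocked RLock instead.
        if self._pid != os.getpid():
            self._sync = threading.RLock()
            self._pid = os.getpid()

    def acquire(self, *args, **kwargs):
        self._create()
        return self._sync.acquire(*args, **kwargs)

    def release(self):
        self._create()
        return self._sync.release()

    def __enter__(self):
        self.acquire()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()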
View File

@@ -76,8 +76,8 @@ class SimpleQueueWrapper(object):
             def _patched_put(*a_args, **a_kwargs):
                 # make sure we flush everything, because after we push the result we will get terminated
                 try:
-                    task = self.__current_task
-                    task.flush(wait_for_uploads=True)
+                    if self.__current_task and self.__current_task.is_main_task():
+                        self.__current_task.flush(wait_for_uploads=True)
                 except:  # noqa
                     pass
                 return getattr(self.__simple_queue, "put")(*a_args, **a_kwargs)
@@ -152,6 +152,7 @@ class PatchOsFork(object):
             task = None
 
         # check if this is Process Pool function
+        patched_worker = False
         if hasattr(self, "_target"):
            # Now we have to patch Pool, because pool terminates subprocess directly after
            # the return value of the pool worker function is pushed into the queue,
@@ -159,15 +160,22 @@ class PatchOsFork(object):
             try:
                 if self._target == pool.worker:  # noqa
                     self._target = partial(PatchOsFork._patched_pool_worker, pool.worker)  # noqa
+                    patched_worker = True
             except:  # noqa
                 pass
 
         try:
             return PatchOsFork._original_process_run(self, *args, **kwargs)
         finally:
             # force creating a Task
             try:
                 if task:
                     try:
+                        if patched_worker:
+                            # remove at exit hooks, we will deadlock when the
+                            # main Pool manager will terminate this process, and it will...
+                            # noinspection PyProtectedMember
+                            task._at_exit_called = True
+                        else:
                             # terminate the current Task
                             # noinspection PyProtectedMember
                             task._at_exit()
                     except:  # noqa

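The second file narrows the pre-termination flush to the main Task and records whether the Process target was swapped for the patched pool worker; for those workers it only sets task._at_exit_called instead of running the full _at_exit(), because the Pool manager terminates the worker as soon as its result lands on the queue, and a teardown cut off while holding locks is exactly the deadlock being fixed. A rough illustration of the target-swapping pattern with functools.partial; _wrapped_target and job are invented for the example and are not part of ClearML:

from functools import partial
from multiprocessing import Process


def _wrapped_target(original_target, *args, **kwargs):
    # Runs inside the child process: do the real work first ...
    try:
        return original_target(*args, **kwargs)
    finally:
        # ... then flush/clean up before the child exits; a Pool worker must
        # finish this before its result reaches the parent, because the Pool
        # may terminate the worker right after that.
        print("child cleanup done")


def job(x):
    print("working on", x)


if __name__ == "__main__":
    p = Process(target=job, args=(42,))
    # Swap the target in place, the same way the diff wraps pool.worker:
    # noinspection PyProtectedMember
    p._target = partial(_wrapped_target, p._target)  # noqa
    p.start()
    p.join()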
View File

@@ -142,6 +142,10 @@ class ForkSafeRLock(_ForkSafeThreadSyncObject):
         # Do whatever cleanup.
         self.release()
 
+    def _is_owned(self):
+        self._create()
+        return self._sync._is_owned()  # noqa
+
 
 class ForkSemaphore(_ForkSafeThreadSyncObject):
     def __init__(self, value=1):
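The _is_owned() delegation added to ForkSafeRLock matters when the lock is handed to a threading.Condition: Condition.__init__ adopts the lock's _is_owned when the attribute exists, and its generic fallback (a non-blocking acquire probe) reports "not owned" for a re-entrant lock the caller already holds. A standalone check of that behaviour using a toy wrapper, not ClearML code:

import threading


class ToyForkSafeRLock(object):
    """Toy re-entrant lock wrapper exposing the private hooks Condition uses."""

    def __init__(self):
        self._sync = threading.RLock()

    def acquire(self, *args, **kwargs):
        return self._sync.acquire(*args, **kwargs)

    def release(self):
        return self._sync.release()

    def __enter__(self):
        self.acquire()
        return self

    def __exit__(self, *args):
        self.release()

    # Without this delegation, threading.Condition falls back to probing with
    # acquire(blocking=False), which succeeds on an RLock the caller already
    # owns and therefore reports "not owned".
    def _is_owned(self):
        return self._sync._is_owned()  # noqa

    def _release_save(self):
        return self._sync._release_save()  # noqa

    def _acquire_restore(self, state):
        return self._sync._acquire_restore(state)  # noqa


if __name__ == "__main__":
    cond = threading.Condition(ToyForkSafeRLock())
    with cond:
        cond.notify()  # would raise RuntimeError without a correct _is_owned()
    print("Condition sees the lock as owned")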