Fix logs, events and jupyter flushing on exit

Author: allegroai
Date: 2020-01-21 16:41:01 +02:00
Parent: f0a27127bf
Commit: 1cc0ea6cf3
6 changed files with 46 additions and 3 deletions


@@ -196,3 +196,19 @@ class Metrics(InterfaceBase):
             return self.send(req, raise_on_errors=False)
         return None
 
+    @staticmethod
+    def close_async_threads():
+        global file_upload_pool
+        global upload_pool
+        try:
+            file_upload_pool.close()
+            file_upload_pool.join()
+        except:
+            pass
+        try:
+            upload_pool.close()
+            upload_pool.join()
+        except:
+            pass
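
Note: the new close_async_threads() drains the module-level upload pools so queued metric and file uploads finish before the interpreter exits. Below is a minimal standalone sketch of that close-and-join pattern, using an illustrative pool and an atexit hook as assumptions rather than the module's actual wiring:

from multiprocessing.pool import ThreadPool

# Illustrative stand-in for the module-level upload pools in Metrics.
upload_pool = ThreadPool(processes=2)

def shutdown_pool(pool):
    # Stop accepting new work, then block until queued uploads complete.
    # Failures are swallowed so shutdown itself never raises.
    try:
        pool.close()
        pool.join()
    except Exception:
        pass

# Typically wired to process teardown, e.g. (assumption):
# import atexit; atexit.register(shutdown_pool, upload_pool)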


@@ -92,6 +92,7 @@ class Reporter(InterfaceBase, AbstractContextManager, SetupUploadMixin, AsyncMan
         if self.get_num_results() > 0:
             self.wait_for_results()
         # make sure we flushed everything
+        self._async_enable = False
         self._write()
         if self.get_num_results() > 0:
             self.wait_for_results()
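
Note: the single added line switches the reporter to synchronous mode before the final write, so the last batch of events is sent on the calling thread rather than handed to a background worker that may already be gone at exit. A rough self-contained sketch of that pattern; the class and names below are illustrative, not the real Reporter:

class SketchReporter:
    def __init__(self):
        self._async_enable = True
        self._buffer = []

    def _write(self):
        # The real Reporter ships buffered events to the server, either via
        # a worker thread (async) or inline (sync); here we just simulate it.
        if self._async_enable:
            pass  # would hand off to a background worker here
        else:
            self._buffer.clear()  # send inline, then drop the local copy

    def flush(self):
        # Force the final write to happen synchronously on this thread.
        self._async_enable = False
        self._write()

reporter = SketchReporter()
reporter.flush()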


@@ -113,6 +113,16 @@ class TaskHandler(BufferingHandler):
         if batch_requests:
             self._thread_pool.apply_async(self._send_events, args=(batch_requests, ))
 
+    def wait_for_flush(self):
+        self.acquire()
+        try:
+            self._thread_pool.close()
+            self._thread_pool.join()
+        except Exception:
+            pass
+        self._thread_pool = ThreadPool(processes=1)
+        self.release()
+
     def _send_events(self, a_request):
         try:
             res = self.session.send(a_request)
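
Note: wait_for_flush() drains the log handler's single worker thread (close + join) and then recreates the pool so the handler keeps working afterwards; the handler lock keeps a concurrent flush from queueing more batches mid-drain. A hedged sketch of that drain-and-recreate pattern outside the real TaskHandler (all names below are illustrative):

import threading
from multiprocessing.pool import ThreadPool

class SketchLogSender:
    def __init__(self):
        self._lock = threading.Lock()
        self._pool = ThreadPool(processes=1)

    def send_async(self, batch):
        # Hand a batch of log records to the worker thread.
        self._pool.apply_async(print, args=(batch,))

    def wait_for_flush(self):
        # Block until every queued batch has been sent, then start a
        # fresh pool so later records can still be handled.
        with self._lock:
            try:
                self._pool.close()
                self._pool.join()
            except Exception:
                pass
            self._pool = ThreadPool(processes=1)

sender = SketchLogSender()
sender.send_async(["last line before exit"])
sender.wait_for_flush()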


@@ -220,11 +220,12 @@ class Task(IdObjectBase, AccessMixin, SetupUploadMixin):
             self.reload()
 
             # if jupyter is present, requirements will be created in the background, when saving a snapshot
             if result.script and script_requirements:
-                requirements = script_requirements.get_requirements()
+                requirements, conda_requirements = script_requirements.get_requirements()
                 if requirements:
                     if not result.script['requirements']:
                         result.script['requirements'] = {}
                     result.script['requirements']['pip'] = requirements
+                    result.script['requirements']['conda'] = conda_requirements
                 self._update_requirements(result.script.get('requirements') or '')
                 self.reload()
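
Note: get_requirements() now returns a (pip, conda) pair, and both are stored under the task script's requirements. A small hypothetical helper showing how such a pair would be folded into the requirements dict; only the 'pip'/'conda' keys mirror the change above, everything else is illustrative:

def store_requirements(script, pip_requirements, conda_requirements):
    # Hypothetical helper: mirrors the dict population in the hunk above.
    if pip_requirements:
        if not script.get('requirements'):
            script['requirements'] = {}
        script['requirements']['pip'] = pip_requirements
        script['requirements']['conda'] = conda_requirements
    return script

# Example:
script = {'entry_point': 'train.py', 'requirements': {}}
store_requirements(script, 'numpy==1.18.1\n', 'numpy=1.18.1\n')
# script['requirements'] -> {'pip': 'numpy==1.18.1\n', 'conda': 'numpy=1.18.1\n'}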