Mirror of https://github.com/clearml/clearml-agent (synced 2025-06-26 18:16:15 +00:00)
Compare commits
10 Commits
| SHA1 |
|---|
| 2b561f6066 |
| 61232d05dd |
| b3418e4496 |
| 5ef627165c |
| 98a983d9a2 |
| 482007c4ce |
| 98198b8006 |
| 94bb11a81a |
| 4158d08f6f |
| 58ab67ea31 |
@@ -5,7 +5,6 @@ future>=0.16.0
 humanfriendly>=2.1
 jsonmodels>=2.2
 jsonschema>=2.6.0
-packaging>=16.0
 pathlib2>=2.3.0
 psutil>=3.4.2
 pyhocon>=0.3.38
@@ -11,7 +11,7 @@ from contextlib import contextmanager
 from typing import Iterator, ContextManager, Sequence, IO, Text
 from uuid import uuid4

-from trains_agent.backend_api.services.tasks import Script
+from trains_agent.backend_api.services import tasks
 from trains_agent.backend_api.session.client import APIClient
 from pathlib2 import Path
 from pytest import fixture
@@ -154,7 +154,7 @@ def test_entry_point_warning(client):
     """
     with create_task(
         client,
-        script=Script(diff="print('hello')", entry_point="foo.py", repository=""),
+        script=tasks.Script(diff="print('hello')", entry_point="foo.py", repository=""),
         **DEFAULT_TASK_ARGS
     ) as task, iterate_output(SHORT_TIMEOUT, run_task(task)) as output:
         for line in output:
@@ -172,7 +172,7 @@ def test_run_no_dirs(client):
     script = "print('{}')".format(uuid)
     with create_task(
         client,
-        script=Script(diff=script, entry_point="", repository="", working_dir=""),
+        script=tasks.Script(diff=script, entry_point="", repository="", working_dir=""),
         **DEFAULT_TASK_ARGS
     ) as task, iterate_output(SHORT_TIMEOUT, run_task(task)) as output:
         search_lines(
@@ -196,7 +196,7 @@ def test_run_working_dir(client):
     script = "print('{}')".format(uuid)
     with create_task(
         client,
-        script=Script(
+        script=tasks.Script(
             diff=script,
             entry_point="",
             repository="git@bitbucket.org:seematics/roee_test_git.git",
@@ -223,7 +223,7 @@ def test_regular_task(client):
     """
     with create_task(
         client,
-        script=Script(
+        script=tasks.Script(
             entry_point="noop.py",
             repository="git@bitbucket.org:seematics/roee_test_git.git",
         ),
@@ -241,7 +241,7 @@ def test_regular_task_nested(client):
     """
     with create_task(
         client,
-        script=Script(
+        script=tasks.Script(
             entry_point="noop_nested.py",
             working_dir="no_reqs",
             repository="git@bitbucket.org:seematics/roee_test_git.git",
@@ -17,7 +17,7 @@ from datetime import datetime
 from distutils.spawn import find_executable
 from functools import partial
 from itertools import chain
-from tempfile import mkdtemp
+from tempfile import mkdtemp, gettempdir
 from time import sleep, time
 from typing import Text, Optional, Any, Tuple
@@ -39,8 +39,8 @@ from trains_agent.definitions import (
     PROGRAM_NAME,
     DEFAULT_VENV_UPDATE_URL,
     ENV_TASK_EXECUTE_AS_USER,
-    ENV_K8S_HOST_MOUNT
-)
+    ENV_K8S_HOST_MOUNT,
+    ENV_TASK_EXTRA_PYTHON_PATH)
 from trains_agent.definitions import WORKING_REPOSITORY_DIR, PIP_EXTRA_INDICES
 from trains_agent.errors import APIError, CommandFailedError, Sigterm
 from trains_agent.helper.base import (
@@ -63,9 +63,10 @@ from trains_agent.helper.base import (
     error,
     get_python_path,
     is_linux_platform,
-    rm_file
-)
+    rm_file,
+    add_python_path)
 from trains_agent.helper.console import ensure_text, print_text, decode_binary_lines
+from trains_agent.helper.os.daemonize import daemonize_process
 from trains_agent.helper.package.base import PackageManager
 from trains_agent.helper.package.conda_api import CondaAPI
 from trains_agent.helper.package.horovod_req import HorovodRequirement
@@ -626,7 +627,7 @@ class Worker(ServiceCommandSection):
         self._session.print_configuration()

     @resolve_names
-    def daemon(self, queues, log_level, foreground=False, docker=False, **kwargs):
+    def daemon(self, queues, log_level, foreground=False, docker=False, detached=False, **kwargs):
         # make sure we only have a single instance,
         # also make sure we set worker_id properly and cache folders
         self._singleton()
@@ -686,7 +687,18 @@ class Worker(ServiceCommandSection):
                     name
                 )
             )
-            sys.stdout = sys.stderr = out_file
+
+            if not detached:
+                # redirect std out/err to new file
+                sys.stdout = sys.stderr = out_file
+            else:
+                # in detached mode
+                # fully detach stdin.stdout/stderr and leave main process, running in the background
+                daemonize_process(out_file.fileno())
+                # reprint headers to std file (we are now inside the daemon process)
+                print("Worker \"{}\" :".format(self.worker_id))
+                self._session.print_configuration()
+                print_table(queues_info, columns=columns, titles=columns)

         try:
             while True:
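With the new `detached` flag the worker no longer just redirects its output to the per-worker log file: it hands itself to `daemonize_process()`, keeps running in the background, and reprints its header into the log from inside the daemon process. Assuming the usual daemon invocation, this is driven from the command line as something like `trains-agent daemon --queue default --detached` (or `-d`), via the `--detached` argument added further below.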
@@ -763,9 +775,13 @@ class Worker(ServiceCommandSection):
         def _print_file(file_path, prev_line_count):
             with open(file_path, "rb") as f:
                 binary_text = f.read()
+                if not binary_text:
+                    return []
             # skip the previously printed lines,
             blines = binary_text.split(b'\n')[prev_line_count:]
-            return decode_binary_lines(blines)
+            if not blines:
+                return blines
+            return decode_binary_lines(blines if blines[-1] else blines[:-1])

         stdout = open(stdout_path, "wt")
         stderr = open(stderr_path, "wt") if stderr_path else stdout
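The `blines[-1]` guard above exists because splitting a newline-terminated byte buffer leaves a trailing empty chunk; a quick illustrative sketch (not part of the patch itself):

```python
# A newline-terminated log buffer yields a trailing empty element after split,
# which the patched _print_file drops before decoding.
log_bytes = b"line1\nline2\n"
chunks = log_bytes.split(b"\n")
print(chunks)        # [b'line1', b'line2', b'']
print(chunks[:-1])   # [b'line1', b'line2']
```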
@@ -858,7 +874,7 @@ class Worker(ServiceCommandSection):
         """
         if not lines:
             return 0
-        print_text("".join(lines))
+        print_text("".join(lines), newline=False)

         # remove backspaces from the text log, they look bad.
         for i, l in enumerate(lines):
@@ -1205,13 +1221,16 @@ class Worker(ServiceCommandSection):
         # Add the script CWD to the python path
         python_path = get_python_path(script_dir, execution.entry_point, self.package_api) \
             if not self.is_conda else None
+        if os.environ.get(ENV_TASK_EXTRA_PYTHON_PATH):
+            python_path = add_python_path(python_path, os.environ.get(ENV_TASK_EXTRA_PYTHON_PATH))
         if python_path:
             os.environ['PYTHONPATH'] = python_path

         # check if we want to run as another user, only supported on linux
         if os.environ.get(ENV_TASK_EXECUTE_AS_USER, None) and is_linux_platform():
             command, script_dir = self._run_as_user_patch(
-                command, script_dir, venv_folder,
+                command, self._session.config_file,
+                script_dir, venv_folder,
                 self._session.config.get('sdk.storage.cache.default_base_dir'),
                 os.environ.get(ENV_TASK_EXECUTE_AS_USER))
             use_execv = False
@@ -1857,6 +1876,16 @@ class Worker(ServiceCommandSection):
             log.warning('Failed creating temporary copy of ~/.ssh for git credential')
             pass

+        # check if the .git credentials exist:
+        host_git_credentials = Path('~/.git-credentials').expanduser()
+        try:
+            if host_git_credentials.is_file():
+                host_git_credentials = host_git_credentials.as_posix()
+            else:
+                host_git_credentials = None
+        except Exception:
+            host_git_credentials = None
+
         # store docker arguments
         self._docker_image = docker_image
         self._docker_arguments = docker_arguments
@@ -1880,7 +1909,7 @@ class Worker(ServiceCommandSection):
             python_version=python_version, conf_file=self.temp_config_path,
             host_apt_cache=host_apt_cache,
             host_pip_cache=host_pip_cache,
-            host_ssh_cache=host_ssh_cache,
+            host_ssh_cache=host_ssh_cache, host_git_credentials=host_git_credentials,
             host_cache=host_cache, mounted_cache=mounted_cache_dir,
             host_pip_dl=host_pip_dl, mounted_pip_dl=mounted_pip_dl_dir,
             host_vcs_cache=host_vcs_cache, mounted_vcs_cache=mounted_vcs_cache,
@@ -1897,7 +1926,7 @@ class Worker(ServiceCommandSection):
                          host_pip_dl, mounted_pip_dl,
                          host_vcs_cache, mounted_vcs_cache,
                          standalone_mode=False, extra_docker_arguments=None, extra_shell_script=None,
-                         force_current_version=None):
+                         force_current_version=None, host_git_credentials=None):
         docker = 'docker'

         base_cmd = [docker, 'run', '-t']
@@ -1999,6 +2028,7 @@ class Worker(ServiceCommandSection):

         base_cmd += (
             ['-v', conf_file+':/root/trains.conf'] +
+            (['-v', host_git_credentials+':/root/.git-credentials'] if host_git_credentials else []) +
             (['-v', host_ssh_cache+':/root/.ssh'] if host_ssh_cache else []) +
             (['-v', host_apt_cache+':/var/cache/apt/archives'] if host_apt_cache else []) +
             (['-v', host_pip_cache+':/root/.cache/pip'] if host_pip_cache else []) +
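The added line contributes at most one extra bind mount to the `docker run` command; a small sketch with a hypothetical host path:

```python
# Hypothetical host path; None would mean no ~/.git-credentials was found on the host.
host_git_credentials = "/home/user/.git-credentials"

mount = (['-v', host_git_credentials + ':/root/.git-credentials'] if host_git_credentials else [])
print(mount)  # ['-v', '/home/user/.git-credentials:/root/.git-credentials']
```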
@@ -2014,7 +2044,7 @@ class Worker(ServiceCommandSection):

         return base_cmd

-    def _run_as_user_patch(self, command, script_dir, venv_folder, sdk_cache_folder, user_uid):
+    def _run_as_user_patch(self, command, trains_conf, script_dir, venv_folder, sdk_cache_folder, user_uid):
         class RunasArgv(Argv):
             def __init__(self, *args):
                 super(RunasArgv, self).__init__(*args)
@@ -2036,14 +2066,20 @@ class Worker(ServiceCommandSection):
                 os.setuid(self.uid)

         # create a home folder for our user
+        trains_agent_home = 'trains_agent_home{}'.format('.'+str(Singleton.get_slot()) if Singleton.get_slot() else '')
         try:
-            home_folder = '/trains_agent_home'
+            home_folder = (Path('/') / trains_agent_home).absolute().as_posix()
             rm_tree(home_folder)
             Path(home_folder).mkdir(parents=True, exist_ok=True)
         except:
-            home_folder = '/home/trains_agent_home'
-            rm_tree(home_folder)
-            Path(home_folder).mkdir(parents=True, exist_ok=True)
+            try:
+                home_folder = (Path.home().parent / trains_agent_home).absolute().as_posix()
+                rm_tree(home_folder)
+                Path(home_folder).mkdir(parents=True, exist_ok=True)
+            except:
+                home_folder = (Path(gettempdir()) / trains_agent_home).absolute().as_posix()
+                rm_tree(home_folder)
+                Path(home_folder).mkdir(parents=True, exist_ok=True)

         # move our entire venv into the new home
         venv_folder = venv_folder.as_posix()
@@ -2066,12 +2102,22 @@ class Worker(ServiceCommandSection):
         except:
             pass

+        # make sure we could access the trains_conf file
+        try:
+            user_trains_conf = os.path.join(home_folder, 'trains.conf')
+            shutil.copy(trains_conf, user_trains_conf)
+            Path(user_trains_conf).chmod(0o0777)
+        except:
+            user_trains_conf = trains_conf
+
         # patch venv folder to new location
         script_dir = script_dir.replace(venv_folder, new_venv_folder)
         # New command line execution
-        command = RunasArgv('bash', '-c', 'HOME=\"{}\" PATH=\"{}\" {}'.format(
+        command = RunasArgv('bash', '-c', 'HOME=\"{}\" PATH=\"{}\" PYTHONPATH=\"{}\" TRAINS_CONFIG_FILE={} {}'.format(
             home_folder,
             os.environ.get('PATH', '').replace(venv_folder, new_venv_folder),
+            os.environ.get('PYTHONPATH', '').replace(venv_folder, new_venv_folder),
+            user_trains_conf,
             command.serialize().replace(venv_folder, new_venv_folder)))
         command.set_uid(user_uid=user_uid, user_gid=user_uid)
@@ -121,6 +121,7 @@ PIP_EXTRA_INDICES = [
 ]
 DEFAULT_PIP_DOWNLOAD_CACHE = normalize_path(CONFIG_DIR, "pip-download-cache")
 ENV_TASK_EXECUTE_AS_USER = 'TRAINS_AGENT_EXEC_USER'
+ENV_TASK_EXTRA_PYTHON_PATH = 'TRAINS_AGENT_EXTRA_PYTHON_PATH'
 ENV_K8S_HOST_MOUNT = 'TRAINS_AGENT_K8S_HOST_MOUNT'
@@ -199,6 +199,20 @@ def get_python_path(script_dir, entry_point, package_api):
         return None


+def add_python_path(base_path, extra_path):
+    try:
+        if not extra_path:
+            return base_path
+        python_path_sep = ';' if is_windows_platform() else ':'
+        base_path = base_path or ''
+        if not base_path.endswith(python_path_sep):
+            base_path += python_path_sep
+        base_path += extra_path.replace(':', python_path_sep)
+    except:
+        pass
+    return base_path
+
+
 class Singleton(ABCMeta):
     _instances = {}
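A minimal sketch of how the new helper combines an existing python path with `TRAINS_AGENT_EXTRA_PYTHON_PATH` (the paths below are made up for illustration, and the ':' separator assumes a non-Windows host):

```python
import os

# The helper added above; the import path follows the worker.py import in this change set.
from trains_agent.helper.base import add_python_path

base = "/home/user/.trains/task_repository/my_repo"           # hypothetical script dir already on the path
extra = os.environ.get("TRAINS_AGENT_EXTRA_PYTHON_PATH",
                       "/opt/shared_libs:/opt/more_libs")      # hypothetical extra entries

combined = add_python_path(base, extra)
# -> "/home/user/.trains/task_repository/my_repo:/opt/shared_libs:/opt/more_libs"
os.environ["PYTHONPATH"] = combined
```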
@@ -4,7 +4,7 @@ from time import sleep
 import requests
 import json
 from threading import Thread
-from packaging import version as packaging_version
+from .package.requirements import SimpleVersion
 from ..version import __version__

 __check_update_thread = None
@@ -30,11 +30,11 @@ def _check_new_version_available():
         return None
     trains_answer = update_server_releases.get("trains-agent", {})
     latest_version = trains_answer.get("version")
-    cur_version = packaging_version.parse(cur_version)
-    latest_version = packaging_version.parse(latest_version or '')
-    if cur_version >= latest_version:
+    cur_version = cur_version
+    latest_version = latest_version or ''
+    if SimpleVersion.compare_versions(cur_version, '>=', latest_version):
         return None
-    patch_upgrade = latest_version.major == cur_version.major and latest_version.minor == cur_version.minor
+    patch_upgrade = True  # latest_version.major == cur_version.major and latest_version.minor == cur_version.minor
     return str(latest_version), patch_upgrade, trains_answer.get("description").split("\r\n")
trains_agent/helper/os/__init__.py (new file, 0 lines)
trains_agent/helper/os/daemonize.py (new file, 74 lines)
@@ -0,0 +1,74 @@
+import os
+
+
+def daemonize_process(redirect_fd=None):
+    """
+    Detach a process from the controlling terminal and run it in the background as a daemon.
+    """
+    assert redirect_fd is None or isinstance(redirect_fd, int)
+
+    # re-spawn in the same directory
+    WORKDIR = os.getcwd()
+
+    # The standard I/O file descriptors are redirected to /dev/null by default.
+    if hasattr(os, "devnull"):
+        devnull = os.devnull
+    else:
+        devnull = "/dev/null"
+
+    try:
+        # Fork a child process so the parent can exit. This returns control to
+        # the command-line or shell. It also guarantees that the child will not
+        # be a process group leader, since the child receives a new process ID
+        # and inherits the parent's process group ID. This step is required
+        # to insure that the next call to os.setsid is successful.
+        pid = os.fork()
+    except OSError as e:
+        raise Exception("%s [%d]" % (e.strerror, e.errno))
+
+    if pid == 0:  # The first child.
+        # To become the session leader of this new session and the process group
+        # leader of the new process group, we call os.setsid().
+        # The process is also guaranteed not to have a controlling terminal.
+        os.setsid()
+
+        # Is ignoring SIGHUP necessary? (Set handlers for asynchronous events.)
+        # import signal
+        # signal.signal(signal.SIGHUP, signal.SIG_IGN)
+
+        try:
+            # Fork a second child and exit immediately to prevent zombies. This
+            # causes the second child process to be orphaned, making the init
+            # process responsible for its cleanup.
+            pid = os.fork()  # Fork a second child.
+        except OSError as e:
+            raise Exception("%s [%d]" % (e.strerror, e.errno))
+
+        if pid == 0:  # The second child.
+            # Since the current working directory may be a mounted filesystem, we
+            # avoid the issue of not being able to unmount the filesystem at
+            # shutdown time by changing it to the root directory.
+            os.chdir(WORKDIR)
+            # We probably don't want the file mode creation mask inherited from
+            # the parent, so we give the child complete control over permissions.
+            os.umask(0)
+        else:
+            # Exit parent (the first child) of the second child.
+            os._exit(0)
+    else:
+        # Exit parent of the first child.
+        os._exit(0)
+
+    # notice we count on the fact that we keep all file descriptors open,
+    # since we opened then in the parent process, but the daemon process will use them
+
+    # Redirect the standard I/O file descriptors to the specified file /dev/null.
+    if redirect_fd is None:
+        redirect_fd = os.open(devnull, os.O_RDWR)
+
+    # Duplicate standard input to standard output and standard error.
+    # standard output (1), standard error (2)
+    os.dup2(redirect_fd, 1)
+    os.dup2(redirect_fd, 2)
+
+    return 0
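For context, a minimal usage sketch of the new module (the log path is hypothetical): the parent and the first child exit inside `daemonize_process()`, only the detached daemon returns, and file descriptors 1 and 2 are duplicated onto the descriptor passed in (or `/dev/null` when none is given).

```python
import os
import time

# Import path follows the worker.py import added in this change set.
from trains_agent.helper.os.daemonize import daemonize_process

log_file = open("/tmp/example_worker.log", "at")   # opened before the fork, so the daemon inherits the fd
daemonize_process(log_file.fileno())               # parent and first child exit here

# From this point on we are inside the detached daemon; stdout/stderr land in the log file.
print("daemon pid: {}".format(os.getpid()))
for _ in range(3):
    print("still alive at {}".format(time.ctime()))
    time.sleep(1)
```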
@@ -14,13 +14,13 @@ import yaml
 from time import time
 from attr import attrs, attrib, Factory
 from pathlib2 import Path
-from packaging import version as packaging_version
 from requirements import parse
 from requirements.requirement import Requirement

 from trains_agent.errors import CommandFailedError
 from trains_agent.helper.base import rm_tree, NonStrictAttrs, select_for_platform, is_windows_platform
 from trains_agent.helper.process import Argv, Executable, DEVNULL, CommandSequence, PathLike
+from trains_agent.helper.package.requirements import SimpleVersion
 from trains_agent.session import Session
 from .base import PackageManager
 from .pip_api.venv import VirtualenvPip
@@ -59,7 +59,7 @@ class CondaAPI(PackageManager):
     A programmatic interface for controlling conda
     """

-    MINIMUM_VERSION = packaging_version.parse("4.3.30")
+    MINIMUM_VERSION = "4.3.30"

     def __init__(self, session, path, python, requirements_manager):
         # type: (Session, PathLike, float, RequirementsManager) -> None
@@ -93,7 +93,7 @@ class CondaAPI(PackageManager):
             )
         )
         self.conda_version = self.get_conda_version(output)
-        if packaging_version.parse(self.conda_version) < self.MINIMUM_VERSION:
+        if SimpleVersion.compare_versions(self.conda_version, '<', self.MINIMUM_VERSION):
             raise CommandFailedError(
                 "conda version '{}' is smaller than minimum supported conda version '{}'".format(
                     self.conda_version, self.MINIMUM_VERSION
@@ -10,11 +10,9 @@ from typing import Text

 import attr
 import requests
-from packaging import version as packaging_version
-from packaging.specifiers import SpecifierSet

 import six
-from .requirements import SimpleSubstitution, FatalSpecsResolutionError
+from .requirements import SimpleSubstitution, FatalSpecsResolutionError, SimpleVersion

 OS_TO_WHEEL_NAME = {"linux": "linux_x86_64", "windows": "win_amd64"}
@@ -156,8 +154,7 @@ class PytorchRequirement(SimpleSubstitution):
         self.os = os_name or self.get_platform()
         self.cuda = "cuda{}".format(self.cuda_version).lower()
         self.python_version_string = str(self.config["agent.default_python"])
-        self.python_major_minor_str = '.'.join(packaging_version.parse(
-            self.python_version_string).base_version.split('.')[:2])
+        self.python_major_minor_str = '.'.join(self.python_version_string.split('.')[:2])
         if '.' not in self.python_major_minor_str:
             raise PytorchResolutionError(
                 "invalid python version {!r} defined in configuration file, key 'agent.default_python': "
@@ -222,7 +219,6 @@ class PytorchRequirement(SimpleSubstitution):
         platform_wheel = "win" if self.get_platform() == "windows" else self.get_platform()
         py_ver = self.python_major_minor_str.replace('.', '')
         url = None
-        spec = SpecifierSet(req.format_specs())
         last_v = None
         # search for our package
         for l in links_parser.links:
@@ -234,10 +230,11 @@ class PytorchRequirement(SimpleSubstitution):
             # version (ignore +cpu +cu92 etc. + is %2B in the file link)
             # version ignore .postX suffix (treat as regular version)
             try:
-                v = packaging_version.parse(parts[1].split('%')[0].split('+')[0])
+                v = str(parts[1].split('%')[0].split('+')[0])
             except Exception:
                 continue
-            if v not in spec or (last_v and last_v > v):
+            if not req.compare_version(v) or \
+                    (last_v and SimpleVersion.compare_versions(last_v, '>', v, ignore_sub_versions=False)):
                 continue
             if not parts[2].endswith(py_ver):
                 continue
@@ -254,9 +251,10 @@ class PytorchRequirement(SimpleSubstitution):
             if self.config.get("agent.package_manager.system_site_packages"):
                 from pip._internal.commands.show import search_packages_info
                 installed_torch = list(search_packages_info([req.name]))
-                op, version = req.specs[0] if req.specs else (None, None)
                 # notice the comparision order, the first part will make sure we have a valid installed package
-                if installed_torch[0]['version'] and (installed_torch[0]['version'] == version or not version):
+                if installed_torch[0]['version'] and req.compare_version(installed_torch[0]['version']):
                     print('PyTorch: requested "{}" version {}, using pre-installed version {}'.format(
                         req.name, req.specs[0] if req.specs else 'unspecified', installed_torch[0]['version']))
                     # package already installed, do nothing
                     return str(req), True
         except:
@@ -306,20 +304,17 @@ class PytorchRequirement(SimpleSubstitution):
     @staticmethod
     def match_version(req, options):
         versioned_options = sorted(
-            ((packaging_version.parse(fix_version(key)), value) for key, value in options.items()),
+            ((fix_version(key), value) for key, value in options.items()),
             key=itemgetter(0),
             reverse=True,
         )
         req.specs = [(op, fix_version(version)) for op, version in req.specs]
-        if req.specs:
-            specs = SpecifierSet(req.format_specs())
-        else:
-            specs = None

         try:
             return next(
                 replacement
                 for version, replacement in versioned_options
-                if not specs or version in specs
+                if req.compare_version(version)
             )
         except StopIteration:
             raise PytorchResolutionError(
@@ -10,7 +10,6 @@ from operator import itemgetter
 from os import path
 from typing import Text, List, Type, Optional, Tuple, Dict

-from packaging import version as packaging_version
 from pathlib2 import Path
 from pyhocon import ConfigTree
 from requirements import parse
@@ -69,8 +68,19 @@ class MarkerRequirement(object):
     def __repr__(self):
         return '{self.__class__.__name__}[{self}]'.format(self=self)

-    def format_specs(self):
-        return ','.join(starmap(operator.add, self.specs))
+    def format_specs(self, num_parts=None, max_num_parts=None):
+        max_num_parts = max_num_parts or num_parts
+        if max_num_parts is None or not self.specs:
+            return ','.join(starmap(operator.add, self.specs))
+
+        op, version = self.specs[0]
+        for v in self._sub_versions_pep440:
+            version = version.replace(v, '.')
+        if num_parts:
+            version = (version.strip('.').split('.') + ['0'] * num_parts)[:max_num_parts]
+        else:
+            version = version.strip('.').split('.')[:max_num_parts]
+        return op+'.'.join(version)

     def __getattr__(self, item):
         return getattr(self.req, item)
@@ -99,6 +109,186 @@ class MarkerRequirement(object):
         else:
             self.specs = greater + smaller

+    def compare_version(self, requested_version, op=None, num_parts=3):
+        """
+        compare the requested version with the one we have in the spec,
+        If the requested version is 1.2.3 the self.spec should be 1.2.3*
+        If the requested version is 1.2 the self.spec should be 1.2*
+        etc.
+
+        :param str requested_version:
+        :param str op: '==', '>', '>=', '<=', '<', '~='
+        :param int num_parts: number of parts to compare
+        :return: True if we answer the requested version
+        """
+        # if we have no specific version, we cannot compare, so assume it's okay
+        if not self.specs:
+            return True
+
+        version = self.specs[0][1]
+        op = (op or self.specs[0][0]).strip()
+
+        return SimpleVersion.compare_versions(requested_version, op, version)
+
+
+class SimpleVersion:
+    _sub_versions_pep440 = ['a', 'b', 'rc', '.post', '.dev', '+', ]
+    VERSION_PATTERN = r"""
+        v?
+        (?:
+            (?:(?P<epoch>[0-9]+)!)?                           # epoch
+            (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+            (?P<pre>                                          # pre-release
+                [-_\.]?
+                (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+                [-_\.]?
+                (?P<pre_n>[0-9]+)?
+            )?
+            (?P<post>                                         # post release
+                (?:-(?P<post_n1>[0-9]+))
+                |
+                (?:
+                    [-_\.]?
+                    (?P<post_l>post|rev|r)
+                    [-_\.]?
+                    (?P<post_n2>[0-9]+)?
+                )
+            )?
+            (?P<dev>                                          # dev release
+                [-_\.]?
+                (?P<dev_l>dev)
+                [-_\.]?
+                (?P<dev_n>[0-9]+)?
+            )?
+        )
+        (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+    """
+    _local_version_separators = re.compile(r"[\._-]")
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    @classmethod
+    def compare_versions(cls, version_a, op, version_b, ignore_sub_versions=True):
+        """
+        Compare two versions based on the op operator
+        returns bool(version_a op version_b)
+        Notice: Ignores a/b/rc/post/dev markers on the version
+
+        :param str version_a:
+        :param str op: '==', '===', '>', '>=', '<=', '<', '~='
+        :param str version_b:
+        :param bool ignore_sub_versions: if true compare only major.minor.patch
+            (ignore a/b/rc/post/dev in the comparison)
+        :return bool: version_a op version_b
+        """
+
+        if not version_b:
+            return True
+        num_parts = 3
+
+        if op == '~=':
+            num_parts = max(num_parts, 2)
+            op = '=='
+            ignore_sub_versions = True
+        elif op == '===':
+            op = '=='
+
+        try:
+            version_a_key = cls._get_match_key(cls._regex.search(version_a), num_parts, ignore_sub_versions)
+            version_b_key = cls._get_match_key(cls._regex.search(version_b), num_parts, ignore_sub_versions)
+        except:
+            # revert to string based
+            for v in cls._sub_versions_pep440:
+                version_a = version_a.replace(v, '.')
+                version_b = version_b.replace(v, '.')
+
+            version_a = (version_a.strip('.').split('.') + ['0'] * num_parts)[:num_parts]
+            version_b = (version_b.strip('.').split('.') + ['0'] * num_parts)[:num_parts]
+            version_a_key = ''
+            version_b_key = ''
+            for i in range(num_parts):
+                pad = '{:0>%d}.' % max([9, 1 + len(version_a[i]), 1 + len(version_b[i])])
+                version_a_key += pad.format(version_a[i])
+                version_b_key += pad.format(version_b[i])

+        if op == '==':
+            return version_a_key == version_b_key
+        if op == '<=':
+            return version_a_key <= version_b_key
+        if op == '>=':
+            return version_a_key >= version_b_key
+        if op == '>':
+            return version_a_key > version_b_key
+        if op == '<':
+            return version_a_key < version_b_key
+        raise ValueError('Unrecognized comparison operator [{}]'.format(op))
+
+    @staticmethod
+    def _parse_letter_version(
+        letter,  # type: str
+        number,  # type: Union[str, bytes, SupportsInt]
+    ):
+        # type: (...) -> Optional[Tuple[str, int]]
+
+        if letter:
+            # We consider there to be an implicit 0 in a pre-release if there is
+            # not a numeral associated with it.
+            if number is None:
+                number = 0
+
+            # We normalize any letters to their lower case form
+            letter = letter.lower()
+
+            # We consider some words to be alternate spellings of other words and
+            # in those cases we want to normalize the spellings to our preferred
+            # spelling.
+            if letter == "alpha":
+                letter = "a"
+            elif letter == "beta":
+                letter = "b"
+            elif letter in ["c", "pre", "preview"]:
+                letter = "rc"
+            elif letter in ["rev", "r"]:
+                letter = "post"
+
+            return letter, int(number)
+        if not letter and number:
+            # We assume if we are given a number, but we are not given a letter
+            # then this is using the implicit post release syntax (e.g. 1.0-1)
+            letter = "post"
+
+            return letter, int(number)
+
+        return ()
+
+    @staticmethod
+    def _get_match_key(match, num_parts, ignore_sub_versions):
+        if ignore_sub_versions:
+            return (0, tuple(int(i) for i in match.group("release").split(".")[:num_parts]),
+                    (), (), (), (),)
+        return (
+            int(match.group("epoch")) if match.group("epoch") else 0,
+            tuple(int(i) for i in match.group("release").split(".")[:num_parts]),
+            SimpleVersion._parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            SimpleVersion._parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            SimpleVersion._parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            SimpleVersion._parse_local_version(match.group("local")),
+        )
+
+    @staticmethod
+    def _parse_local_version(local):
+        # type: (str) -> Optional[LocalType]
+        """
+        Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+        """
+        if local is not None:
+            return tuple(
+                part.lower() if not part.isdigit() else int(part)
+                for part in SimpleVersion._local_version_separators.split(local)
+            )
+        return ()
+
+
 @six.add_metaclass(ABCMeta)
 class RequirementSubstitution(object):
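A few illustrative comparisons with the new helper (the version strings are picked for illustration; by default pre/post/dev markers are ignored and only the first three release numbers are compared):

```python
# Import path follows the conda_api.py import added in this change set.
from trains_agent.helper.package.requirements import SimpleVersion

print(SimpleVersion.compare_versions('1.4.0', '>=', '1.3.1'))         # True
print(SimpleVersion.compare_versions('4.6.14', '<', '4.3.30'))        # False: a hypothetical conda version vs. the 4.3.30 minimum
print(SimpleVersion.compare_versions('1.4.0.post2', '==', '1.4.0'))   # True: .post/.dev/rc markers are ignored by default
print(SimpleVersion.compare_versions('1.4.0', '>=', ''))              # True: an empty right-hand version always passes
```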
@@ -177,7 +367,7 @@ class SimpleSubstitution(RequirementSubstitution):

         if req.specs:
             _, version_number = req.specs[0]
-            assert packaging_version.parse(version_number)
+            # assert packaging_version.parse(version_number)
         else:
             version_number = self.get_pip_version(self.name)
@@ -137,3 +137,7 @@ class Singleton(object):
         if os.environ.get(ENV_K8S_HOST_MOUNT):
             return os.environ.get(ENV_K8S_HOST_MOUNT).split(':')[-1]
         return gettempdir()
+
+    @classmethod
+    def get_slot(cls):
+        return cls.instance_slot or 0
@@ -35,6 +35,7 @@ def get_parser():
     for group_name, group in groups:
         p = parser if not group_name else parser.add_argument_group(group_name)
         for key, value in group:
-            p.add_argument(key, **value)
+            aliases = value.pop("aliases", [])
+            p.add_argument(key, *aliases, **value)

     return top_parser
@@ -72,6 +72,11 @@ DAEMON_ARGS = dict({
         'help': 'Do not use any network connects, assume everything is pre-installed',
         'action': 'store_true',
     },
+    '--detached': {
+        'help': 'Detached mode, run agent in the background',
+        'action': 'store_true',
+        'aliases': ['-d'],
+    },

 }, **WORKER_ARGS)
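A self-contained sketch of how the updated `get_parser()` loop consumes the new `aliases` entry, using plain `argparse` and the `--detached` definition above:

```python
import argparse

parser = argparse.ArgumentParser(prog="trains-agent daemon")

# One (key, value) pair as it appears in DAEMON_ARGS above.
key, value = '--detached', {
    'help': 'Detached mode, run agent in the background',
    'action': 'store_true',
    'aliases': ['-d'],
}

# The same two lines the patched get_parser() now runs for every argument.
aliases = value.pop("aliases", [])
parser.add_argument(key, *aliases, **value)

print(parser.parse_args(['-d']).detached)          # True
print(parser.parse_args(['--detached']).detached)  # True
print(parser.parse_args([]).detached)              # False
```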
@@ -1 +1 @@
-__version__ = '0.14.0'
+__version__ = '0.14.1'