Mirror of https://github.com/NVIDIA/nvidia-container-toolkit (synced 2024-11-25 05:21:33 +00:00)

Merge master into experimental

This commit is contained in:
parent 39aa24690c
commit 73baa74ea8

@ -20,23 +20,29 @@ default:
variables:
  IMAGE: "${CI_REGISTRY_IMAGE}"
  IMAGE_TAG: "${CI_COMMIT_REF_SLUG}"
  BUILDIMAGE: "${CI_REGISTRY_IMAGE}/build:${CI_COMMIT_SHORT_SHA}"

stages:
  - image
  - lint
  - go-checks
  - go-build
  - unit-tests
  - build
  - build-long
  - scan
  - release

build-dev-image:
  stage: image
  before_script:
    - docker login -u "${CI_REGISTRY_USER}" -p "${CI_REGISTRY_PASSWORD}" "${CI_REGISTRY}"
  script:
    - apk --no-cache add make bash
    - make .build-image
    - docker login -u "${CI_REGISTRY_USER}" -p "${CI_REGISTRY_PASSWORD}" "${CI_REGISTRY}"
    - make .push-build-image

.requires-build-image:
  variables:
    SKIP_IMAGE_BUILD: "yes"
  before_script:
    - apk --no-cache add make bash
    - docker login -u "${CI_REGISTRY_USER}" -p "${CI_REGISTRY_PASSWORD}" "${CI_REGISTRY}"
    - make .pull-build-image
  image: "${BUILDIMAGE}"

.go-check:
  extends:
@ -47,45 +53,45 @@ fmt:
  extends:
    - .go-check
  script:
    - make docker-assert-fmt
    - make assert-fmt

vet:
  extends:
    - .go-check
  script:
    - make docker-vet
    - make vet

lint:
  extends:
    - .go-check
  script:
    - make docker-lint
    - make lint
  allow_failure: true

ineffassign:
  extends:
    - .go-check
  script:
    - make docker-ineffassign
    - make ineffassign
  allow_failure: true

misspell:
  extends:
    - .go-check
  script:
    - make docker-misspell
    - make misspell

go-build:
  extends:
    - .requires-build-image
  stage: go-build
  script:
    - make docker-build
    - make build

unit-tests:
  extends:
    - .requires-build-image
  stage: unit-tests
  script:
    - make docker-coverage
    - make coverage

@ -15,17 +15,6 @@
include:
  - .common-ci.yml

stages:
  - image
  - go-checks
  - go-build
  - unit-tests
  - test
  - scan
  - release
  - build-one
  - build-all

.build-setup:
  before_script:
    - apk update
@ -42,7 +31,7 @@ stages:
.build-one-setup:
  extends:
    - .build-setup
  stage: build-one
  stage: build
  rules:
    - if: $CI_MERGE_REQUEST_ID

@ -59,7 +48,7 @@ stages:
.build-all-setup:
  extends:
    - .build-setup
  stage: build-all
  stage: build-long
  timeout: 2h 30m
  rules:
    - if: $CI_COMMIT_TAG

.gitmodules (vendored, new file, 9 lines)
@ -0,0 +1,9 @@
[submodule "third_party/libnvidia-container"]
	path = third_party/libnvidia-container
	url = https://gitlab.com/nvidia/container-toolkit/libnvidia-container.git
[submodule "third_party/nvidia-container-runtime"]
	path = third_party/nvidia-container-runtime
	url = https://gitlab.com/nvidia/container-toolkit/container-runtime.git
[submodule "third_party/nvidia-docker"]
	path = third_party/nvidia-docker
	url = https://gitlab.com/nvidia/container-toolkit/nvidia-docker.git

DEVELOPMENT.md (new file, 45 lines)
@ -0,0 +1,45 @@
# NVIDIA Container Toolkit Release Tooling

This repository allows the components of the NVIDIA container stack to be
built and released as the NVIDIA Container Toolkit from a single repository. The components:
* `libnvidia-container`
* `nvidia-container-runtime`
* `nvidia-docker`
are included as submodules in the `third_party` folder.

The `nvidia-container-toolkit` component resides in this repository directly.

## Building

In order to build the packages, the following command is used:
```sh
./scripts/build-all-components.sh TARGET
```
where `TARGET` is a make target that is valid for each of the sub-components.

These include:
* `ubuntu18.04-amd64`
* `centos8-x86_64`

The packages are generated in the `dist` folder.

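For example, to build the Ubuntu 18.04 `amd64` packages for every component (a representative invocation; any of the targets listed above can be substituted):
```sh
./scripts/build-all-components.sh ubuntu18.04-amd64
```
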
## Testing local changes

To use the same build logic to generate packages that include local changes,
the location of the individual components can be overridden using the `LIBNVIDIA_CONTAINER_ROOT`,
`NVIDIA_CONTAINER_TOOLKIT_ROOT`, `NVIDIA_CONTAINER_RUNTIME_ROOT`, and `NVIDIA_DOCKER_ROOT`
environment variables.

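As a sketch of how this is used, a local checkout of `libnvidia-container` could be substituted for the bundled submodule as follows (the checkout path is illustrative):
```sh
# Build against a local libnvidia-container checkout instead of the submodule;
# the path below is an example only.
LIBNVIDIA_CONTAINER_ROOT=$HOME/src/libnvidia-container \
    ./scripts/build-all-components.sh ubuntu18.04-amd64
```
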
## Testing packages locally

The [test/release](./test/release/) folder contains documentation on how the installation of local or staged packages can be tested.

## Releasing

A utility script, [`scripts/release.sh`](./scripts/release.sh), is provided to build
the packages required for a release. If run without arguments, all supported distribution-architecture combinations are built. A specific distribution-architecture pair can also be provided:
```sh
./scripts/release.sh ubuntu18.04-amd64
```
where the `amd64` builds for `ubuntu18.04` are given as an example.

Makefile (13 changed lines)
@ -17,7 +17,7 @@ MKDIR ?= mkdir
DIST_DIR ?= $(CURDIR)/dist

LIB_NAME := nvidia-container-toolkit
LIB_VERSION := 1.5.2
LIB_VERSION := 1.6.0
LIB_TAG ?= rc.1

GOLANG_VERSION := 1.16.3
@ -27,12 +27,13 @@ MODULE := github.com/NVIDIA/nvidia-container-toolkit
docker-native:
include $(CURDIR)/docker/docker.mk

ifeq ($(IMAGE),)
ifeq ($(IMAGE_NAME),)
REGISTRY ?= nvidia
IMAGE=$(REGISTRY)/container-toolkit
IMAGE_NAME = $(REGISTRY)/container-toolkit
endif
IMAGE_TAG ?= $(GOLANG_VERSION)
BUILDIMAGE ?= $(IMAGE):$(IMAGE_TAG)-devel

BUILDIMAGE_TAG ?= golang$(GOLANG_VERSION)
BUILDIMAGE ?= $(IMAGE_NAME)-build:$(BUILDIMAGE_TAG)

EXAMPLES := $(patsubst ./examples/%/,%,$(sort $(dir $(wildcard ./examples/*/))))
EXAMPLE_TARGETS := $(patsubst %,example-%, $(EXAMPLES))
@ -43,7 +44,7 @@ CMD_TARGETS := $(patsubst %,cmd-%, $(CMDS))
$(info CMD_TARGETS=$(CMD_TARGETS))

CHECK_TARGETS := assert-fmt vet lint ineffassign misspell
MAKE_TARGETS := binaries build all check fmt lint-internal test examples cmds coverage generate $(CHECK_TARGETS)
MAKE_TARGETS := binaries build check fmt lint-internal test examples cmds coverage generate $(CHECK_TARGETS)

TARGETS := $(MAKE_TARGETS) $(EXAMPLE_TARGETS) $(CMD_TARGETS)

README.md (new file, 31 lines)
@ -0,0 +1,31 @@
# NVIDIA Container Toolkit

[![GitHub license](https://img.shields.io/github/license/NVIDIA/nvidia-container-toolkit?style=flat-square)](https://raw.githubusercontent.com/NVIDIA/nvidia-container-toolkit/master/LICENSE)
[![Documentation](https://img.shields.io/badge/documentation-wiki-blue.svg?style=flat-square)](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html)
[![Package repository](https://img.shields.io/badge/packages-repository-b956e8.svg?style=flat-square)](https://nvidia.github.io/libnvidia-container)

![nvidia-container-stack](https://cloud.githubusercontent.com/assets/3028125/12213714/5b208976-b632-11e5-8406-38d379ec46aa.png)

## Introduction

The NVIDIA Container Toolkit allows users to build and run GPU-accelerated containers. The toolkit includes a container runtime [library](https://github.com/NVIDIA/libnvidia-container) and utilities to automatically configure containers to leverage NVIDIA GPUs.

Product documentation, including an architecture overview, platform support, and installation and usage guides, can be found in the [documentation repository](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html).

## Getting Started

**Make sure you have installed the [NVIDIA driver](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html#nvidia-drivers) for your Linux distribution.**
**Note that you do not need to install the CUDA Toolkit on the host system; only the NVIDIA driver needs to be installed.**

For instructions on getting started with the NVIDIA Container Toolkit, refer to the [installation guide](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html#installation-guide).

## Usage

The [user guide](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/user-guide.html) provides information on the configuration and command line options available when running GPU containers with Docker.

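Once the toolkit and driver are installed, a common smoke test is to run `nvidia-smi` in a CUDA base container with GPU access enabled; the invocation below is illustrative (the CUDA image tag may differ on your system):
```sh
# Requires Docker 19.03+ with the NVIDIA Container Toolkit installed;
# the image tag shown is an example.
docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi
```
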
## Issues and Contributing

[Check out the Contributing document!](CONTRIBUTING.md)

* Please let us know about any problems by [filing a new issue](https://github.com/NVIDIA/nvidia-container-toolkit/issues/new)
* You can contribute by creating a [merge request](https://gitlab.com/nvidia/container-toolkit/container-toolkit/-/merge_requests/new) to our public GitLab repository

@ -3,10 +3,12 @@ FROM ${BASEIMAGE}

RUN yum install -y \
    ca-certificates \
    wget \
    gcc \
    git \
    make \
    rpm-build \
    make && \
    wget \
    && \
    rm -rf /var/cache/yum/*

ARG GOLANG_VERSION=0.0.0
@ -59,6 +61,7 @@ CMD arch=$(uname -m) && \
    rpmbuild --clean --target=$arch -bb \
        -D "_topdir $PWD" \
        -D "version $VERSION" \
        -D "libnvidia_container_version ${VERSION}-${RELEASE}" \
        -D "release $RELEASE" \
        SPECS/nvidia-container-toolkit.spec && \
    mv RPMS/$arch/*.rpm /dist

@ -3,10 +3,12 @@ FROM ${BASEIMAGE}

RUN yum install -y \
    ca-certificates \
    wget \
    gcc \
    git \
    make \
    rpm-build && \
    rpm-build \
    wget \
    && \
    rm -rf /var/cache/yum/*

ARG GOLANG_VERSION=0.0.0
@ -57,6 +59,7 @@ CMD arch=$(uname -m) && \
    rpmbuild --clean --target=$arch -bb \
        -D "_topdir $PWD" \
        -D "version $VERSION" \
        -D "libnvidia_container_version ${VERSION}-${RELEASE}" \
        -D "release $RELEASE" \
        SPECS/nvidia-container-toolkit.spec && \
    mv RPMS/$arch/*.rpm /dist

@ -64,5 +64,6 @@ RUN sed -i "s;@VERSION@;${REVISION};" debian/changelog && \
    if [ "$REVISION" != "$(dpkg-parsechangelog --show-field=Version)" ]; then exit 1; fi

CMD export DISTRIB="$(lsb_release -cs)" && \
    debuild -eDISTRIB -eSECTION --dpkg-buildpackage-hook='sh debian/prepare' -i -us -uc -b && \
    debuild -eDISTRIB -eSECTION -eLIBNVIDIA_CONTAINER_VERSION="${REVISION}" \
        --dpkg-buildpackage-hook='sh debian/prepare' -i -us -uc -b && \
    mv /tmp/nvidia-container-toolkit_*.deb /dist

@ -56,6 +56,7 @@ CMD arch=$(uname -m) && \
    rpmbuild --clean --target=$arch -bb \
        -D "_topdir $PWD" \
        -D "version $VERSION" \
        -D "libnvidia_container_version ${VERSION}-${RELEASE}" \
        -D "release $RELEASE" \
        SPECS/nvidia-container-toolkit.spec && \
    mv RPMS/$arch/*.rpm /dist

@ -57,5 +57,6 @@ RUN sed -i "s;@VERSION@;${REVISION};" debian/changelog && \
    if [ "$REVISION" != "$(dpkg-parsechangelog --show-field=Version)" ]; then exit 1; fi

CMD export DISTRIB="$(lsb_release -cs)" && \
    debuild -eDISTRIB -eSECTION --dpkg-buildpackage-hook='sh debian/prepare' -i -us -uc -b && \
    debuild -eDISTRIB -eSECTION -eLIBNVIDIA_CONTAINER_VERSION="${REVISION}" \
        --dpkg-buildpackage-hook='sh debian/prepare' -i -us -uc -b && \
    mv /tmp/*.deb /dist

@ -14,7 +14,7 @@

# Supported OSs by architecture
AMD64_TARGETS := ubuntu20.04 ubuntu18.04 ubuntu16.04 debian10 debian9
X86_64_TARGETS := centos7 centos8 rhel7 rhel8 amazonlinux1 amazonlinux2 opensuse-leap15.1
X86_64_TARGETS := centos7 centos8 rhel7 rhel8 amazonlinux2 opensuse-leap15.1
PPC64LE_TARGETS := ubuntu18.04 ubuntu16.04 centos7 centos8 rhel7 rhel8
ARM64_TARGETS := ubuntu20.04 ubuntu18.04
AARCH64_TARGETS := centos8 rhel8
@ -97,11 +97,11 @@ docker-all: $(AMD64_TARGETS) $(X86_64_TARGETS) \

# private centos target
--centos%: OS := centos
--centos%: PKG_REV := $(if $(LIB_TAG),0.1.$(LIB_TAG),2)
--centos%: PKG_REV := $(if $(LIB_TAG),0.1.$(LIB_TAG),1)

# private amazonlinux target
--amazonlinux%: OS := amazonlinux
--amazonlinux%: PKG_REV = $(if $(LIB_TAG),0.1.$(LIB_TAG).amzn$(VERSION),2.amzn$(VERSION))
--amazonlinux%: PKG_REV := $(if $(LIB_TAG),0.1.$(LIB_TAG),1)

# private opensuse-leap target
--opensuse-leap%: OS = opensuse-leap
@ -110,7 +110,7 @@ docker-all: $(AMD64_TARGETS) $(X86_64_TARGETS) \

# private rhel target (actually built on centos)
--rhel%: OS := centos
--rhel%: PKG_REV := $(if $(LIB_TAG),0.1.$(LIB_TAG),2)
--rhel%: PKG_REV := $(if $(LIB_TAG),0.1.$(LIB_TAG),1)
--rhel%: VERSION = $(patsubst rhel%-$(ARCH),%,$(TARGET_PLATFORM))
--rhel%: ARTIFACTS_DIR = $(DIST_DIR)/rhel$(VERSION)/$(ARCH)

@ -1,4 +1,4 @@
nvidia-container-toolkit (1.5.2~rc.1-1) experimental; urgency=medium
nvidia-container-toolkit (1.6.0~rc.1-1) experimental; urgency=medium

  * Include nvidia-container-runtime into nvidia-container-toolkit package

@ -10,8 +10,8 @@ Build-Depends: debhelper (>= 9)

Package: nvidia-container-toolkit
Architecture: any
Depends: ${misc:Depends}, libnvidia-container-tools (>= 1.4.0), libnvidia-container-tools (<< 2.0.0), libseccomp2
Breaks: nvidia-container-runtime (<= 3.5.0), nvidia-container-runtime-hook
Replaces: nvidia-container-runtime (<= 3.5.0), nvidia-container-runtime-hook
Depends: ${misc:Depends}, libnvidia-container-tools (>= @LIBNVIDIA_CONTAINER_VERSION@), libnvidia-container-tools (<< 2.0.0), libseccomp2
Breaks: nvidia-container-runtime (<= 3.5.0-1), nvidia-container-runtime-hook
Replaces: nvidia-container-runtime (<= 3.5.0-1), nvidia-container-runtime-hook
Description: NVIDIA container runtime hook
 Provides an OCI hook to enable GPU support in containers.

@ -3,6 +3,7 @@
set -e

sed -i "s;@SECTION@;${SECTION:+$SECTION/};g" debian/control
sed -i "s;@LIBNVIDIA_CONTAINER_VERSION@;${LIBNVIDIA_CONTAINER_VERSION:+$LIBNVIDIA_CONTAINER_VERSION};g" debian/control

if [ -n "$DISTRIB" ]; then
    sed -i "s;UNRELEASED;$DISTRIB;" debian/changelog

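Together with the `debuild -eLIBNVIDIA_CONTAINER_VERSION="${REVISION}"` invocation and the `@LIBNVIDIA_CONTAINER_VERSION@` placeholder in `debian/control` shown above, the substitution works roughly as sketched below (the version value is an example only):
```sh
# Sketch of the placeholder substitution performed by debian/prepare.
# The value is exported by the build; the one shown here is illustrative.
export LIBNVIDIA_CONTAINER_VERSION="1.6.0~rc.1-1"
sed -i "s;@LIBNVIDIA_CONTAINER_VERSION@;${LIBNVIDIA_CONTAINER_VERSION:+$LIBNVIDIA_CONTAINER_VERSION};g" debian/control
# debian/control then depends on: libnvidia-container-tools (>= 1.6.0~rc.1-1)
```
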
@ -17,10 +17,10 @@ Source3: oci-nvidia-hook
Source4: oci-nvidia-hook.json
Source5: LICENSE

Obsoletes: nvidia-container-runtime <= 3.5.0, nvidia-container-runtime-hook
Obsoletes: nvidia-container-runtime <= 3.5.0-1, nvidia-container-runtime-hook
Provides: nvidia-container-runtime
Provides: nvidia-container-runtime-hook
Requires: libnvidia-container-tools >= 1.4.0, libnvidia-container-tools < 2.0.0
Requires: libnvidia-container-tools >= %{libnvidia_container_version}, libnvidia-container-tools < 2.0.0

%if 0%{?suse_version}
Requires: libseccomp2
@ -64,7 +64,7 @@ rm -f %{_bindir}/nvidia-container-runtime-hook
/usr/share/containers/oci/hooks.d/oci-nvidia-hook.json

%changelog
* Mon Sep 06 2021 NVIDIA CORPORATION <cudatools@nvidia.com> 1.5.2-0.1.rc.1
* Mon Sep 06 2021 NVIDIA CORPORATION <cudatools@nvidia.com> 1.6.0-0.1.rc.1

- Include nvidia-container-runtime into nvidia-container-toolkit package

scripts/build-all-components.sh (new executable file, 67 lines)
@ -0,0 +1,67 @@
#!/usr/bin/env bash

# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script is used to build the packages for the components of the NVIDIA
# Container Stack. These include the nvidia-container-toolkit in this repository
# as well as the components included in the third_party folder.
# All required packages are generated in the specified dist folder.

function assert_usage() {
    echo "Missing argument $1"
    echo "$(basename ${BASH_SOURCE[0]}) TARGET"
    exit 1
}

set -e -x

SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/../scripts && pwd )"
PROJECT_ROOT="$( cd ${SCRIPTS_DIR}/.. && pwd )"

if [[ $# -ne 1 ]]; then
    assert_usage "TARGET"
fi

TARGET=$1

: ${DIST_DIR:=${PROJECT_ROOT}/dist}
export DIST_DIR

echo "Building ${TARGET} for all packages to ${DIST_DIR}"

: ${LIBNVIDIA_CONTAINER_ROOT:=${PROJECT_ROOT}/third_party/libnvidia-container}
: ${NVIDIA_CONTAINER_TOOLKIT_ROOT:=${PROJECT_ROOT}}
: ${NVIDIA_CONTAINER_RUNTIME_ROOT:=${PROJECT_ROOT}/third_party/nvidia-container-runtime}
: ${NVIDIA_DOCKER_ROOT:=${PROJECT_ROOT}/third_party/nvidia-docker}

${SCRIPTS_DIR}/get-component-versions.sh

# Build libnvidia-container
make -C ${LIBNVIDIA_CONTAINER_ROOT} -f mk/docker.mk ${TARGET}

# Build nvidia-container-toolkit
make -C ${NVIDIA_CONTAINER_TOOLKIT_ROOT} ${TARGET}

# We set the TOOLKIT_VERSION for the nvidia-container-runtime and nvidia-docker targets
# TODO: This is not yet enabled in the makefiles below
: ${PREVIOUS_TOOLKIT_VERSION:=1.5.1}
echo "Using TOOLKIT_VERSION=${PREVIOUS_TOOLKIT_VERSION} as previous nvidia-container-toolkit version"

# Build nvidia-container-runtime
make -C ${NVIDIA_CONTAINER_RUNTIME_ROOT} TOOLKIT_VERSION=${PREVIOUS_TOOLKIT_VERSION} ${TARGET}

# Build nvidia-docker2
make -C ${NVIDIA_DOCKER_ROOT} TOOLKIT_VERSION=${PREVIOUS_TOOLKIT_VERSION} ${TARGET}

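As a usage sketch, the output directory can be redirected through `DIST_DIR` (the path below is illustrative):
```sh
# Build the centos8 x86_64 packages for every component, writing the results
# to /tmp/nvidia-dist instead of the default ./dist folder.
DIST_DIR=/tmp/nvidia-dist ./scripts/build-all-components.sh centos8-x86_64
```
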
scripts/get-component-versions.sh (new executable file, 62 lines)
@ -0,0 +1,62 @@
#!/usr/bin/env bash

# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script determines the package versions for the components of the NVIDIA
# Container Stack: the nvidia-container-toolkit in this repository as well as
# the components included in the third_party folder.

function assert_usage() {
    exit 1
}

set -e

SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/../scripts && pwd )"
PROJECT_ROOT="$( cd ${SCRIPTS_DIR}/.. && pwd )"

: ${LIBNVIDIA_CONTAINER_ROOT:=${PROJECT_ROOT}/third_party/libnvidia-container}
: ${NVIDIA_CONTAINER_TOOLKIT_ROOT:=${PROJECT_ROOT}}
: ${NVIDIA_CONTAINER_RUNTIME_ROOT:=${PROJECT_ROOT}/third_party/nvidia-container-runtime}
: ${NVIDIA_DOCKER_ROOT:=${PROJECT_ROOT}/third_party/nvidia-docker}

# Get version for libnvidia-container
libnvidia_container_version=$(grep "#define NVC_VERSION" ${LIBNVIDIA_CONTAINER_ROOT}/src/nvc.h \
    | sed -e 's/#define NVC_VERSION[[:space:]]"\(.*\)"/\1/')

# Get version for nvidia-container-toolkit
nvidia_container_toolkit_version=$(grep -m 1 "^LIB_VERSION := " ${NVIDIA_CONTAINER_TOOLKIT_ROOT}/Makefile | sed -e 's/LIB_VERSION :=[[:space:]]\(.*\)[[:space:]]*/\1/')
nvidia_container_toolkit_tag=$(grep -m 1 "^LIB_TAG .= " ${NVIDIA_CONTAINER_TOOLKIT_ROOT}/Makefile | sed -e 's/LIB_TAG .=[[:space:]]\(.*\)[[:space:]]*/\1/')
nvidia_container_toolkit_version="${nvidia_container_toolkit_version}${nvidia_container_toolkit_tag:+~${nvidia_container_toolkit_tag}}"

# Get version for nvidia-container-runtime
nvidia_container_runtime_version=$(grep -m 1 "^LIB_VERSION := " ${NVIDIA_CONTAINER_RUNTIME_ROOT}/Makefile | sed -e 's/LIB_VERSION :=[[:space:]]\(.*\)[[:space:]]*/\1/')
nvidia_container_runtime_tag=$(grep -m 1 "^LIB_TAG .= " ${NVIDIA_CONTAINER_RUNTIME_ROOT}/Makefile | sed -e 's/LIB_TAG .=[[:space:]]\(.*\)[[:space:]]*/\1/')
nvidia_container_runtime_version="${nvidia_container_runtime_version}${nvidia_container_runtime_tag:+~${nvidia_container_runtime_tag}}"

# Get version for nvidia-docker
nvidia_docker_version=$(grep -m 1 "^LIB_VERSION := " ${NVIDIA_DOCKER_ROOT}/Makefile | sed -e 's/LIB_VERSION :=[[:space:]]\(.*\)[[:space:]]*/\1/')
nvidia_docker_tag=$(grep -m 1 "^LIB_TAG .= " ${NVIDIA_DOCKER_ROOT}/Makefile | sed -e 's/LIB_TAG .=[[:space:]]\(.*\)[[:space:]]*/\1/')
nvidia_docker_version="${nvidia_docker_version}${nvidia_docker_tag:+~${nvidia_docker_tag}}"

echo "LIBNVIDIA_CONTAINER_VERSION=${libnvidia_container_version}"
echo "NVIDIA_CONTAINER_TOOLKIT_VERSION=${nvidia_container_toolkit_version}"
if [[ "${libnvidia_container_version}" != "${nvidia_container_toolkit_version}" ]]; then
    >&2 echo "WARNING: The libnvidia-container and nvidia-container-toolkit versions do not match"
fi
echo "NVIDIA_CONTAINER_RUNTIME_VERSION=${nvidia_container_runtime_version}"
echo "NVIDIA_DOCKER_VERSION=${nvidia_docker_version}"

scripts/release.sh (new executable file, 55 lines)
@ -0,0 +1,55 @@
#!/usr/bin/env bash

# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script is used to build the packages for the components of the NVIDIA
# Container Stack. These include the nvidia-container-toolkit in this repository
# as well as the components included in the third_party folder.
# All required packages are generated in the specified dist folder.

set -e -x

SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/../scripts && pwd )"
PROJECT_ROOT="$( cd ${SCRIPTS_DIR}/.. && pwd )"

# This list represents the distribution-architecture pairs that are actually published
# to the relevant repositories. The targets forwarded to the build-all-components script
# can be overridden by specifying command-line arguments.
all=(
    amazonlinux2-x86_64
    centos7-ppc64le
    centos7-x86_64
    centos8-aarch64
    centos8-ppc64le
    centos8-x86_64
    debian10-amd64
    debian9-amd64
    opensuse-leap15.1-x86_64
    ubuntu16.04-amd64
    ubuntu16.04-ppc64le
    ubuntu18.04-amd64
    ubuntu18.04-arm64
    ubuntu18.04-ppc64le
)

if [[ $# -gt 0 ]]; then
    targets=($*)
else
    targets=${all[@]}
fi

for target in ${targets[@]}; do
    ${SCRIPTS_DIR}/build-all-components.sh ${target}
done

test/release/Makefile (new file, 59 lines)
@ -0,0 +1,59 @@
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

WORKFLOW ?= nvidia-docker
TEST_REPO ?= elezar.github.io

DISTRIBUTIONS := ubuntu18.04 centos8

IMAGE_TARGETS := $(patsubst %,image-%, $(DISTRIBUTIONS))
RUN_TARGETS := $(patsubst %,run-%, $(DISTRIBUTIONS))
RELEASE_TARGETS := $(patsubst %,release-%, $(DISTRIBUTIONS))
LOCAL_TARGETS := $(patsubst %,local-%, $(DISTRIBUTIONS))

.PHONY: $(IMAGE_TARGETS)

image-%: DOCKERFILE = docker/$(*)/Dockerfile

images: $(IMAGE_TARGETS)
$(IMAGE_TARGETS): image-%:
	docker build \
		--build-arg WORKFLOW="$(WORKFLOW)" \
		--build-arg TEST_REPO="$(TEST_REPO)" \
		-t nvidia-container-toolkit-repo-test:$(*) \
		-f $(DOCKERFILE) \
		$(shell dirname $(DOCKERFILE))

%-ubuntu18.04: ARCH = amd64
%-centos8: ARCH = x86_64

RELEASE_TEST_DIR := $(patsubst %/,%,$(dir $(abspath $(lastword $(MAKEFILE_LIST)))))
PROJECT_ROOT := $(RELEASE_TEST_DIR)/../..

LOCAL_PACKAGE_ROOT := $(PROJECT_ROOT)/dist

local-%: DIST = $(*)
local-%: LOCAL_REPO_ARGS = -v $(LOCAL_PACKAGE_ROOT)/$(DIST)/$(ARCH):/local-repository
$(LOCAL_TARGETS): local-%: release-% run-% | release-%

run-%: DIST = $(*)
$(RUN_TARGETS): run-%:
	docker run --rm -ti \
		$(LOCAL_REPO_ARGS) \
		nvidia-container-toolkit-repo-test:$(*)

# Ensure that the local package root exists
$(RELEASE_TARGETS): release-%: $(LOCAL_PACKAGE_ROOT)/$(*)/$(ARCH)
	$(PROJECT_ROOT)/scripts/release.sh $(*)-$(ARCH)

test/release/README.md (new file, 107 lines)
@ -0,0 +1,107 @@
# Testing Packaging Workflows

## Building the Docker images

```bash
make images
```

This assumes that the `nvidia-docker` workflow is being tested and that the test packages have been published to the `elezar.github.io` GitHub Pages site. Both can be overridden using make variables (see the example below).

Valid values for the `WORKFLOW` variable are:
* `nvidia-docker`
* `nvidia-container-runtime`

This follows the instructions for setting up the [`nvidia-docker` repository](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html#setting-up-nvidia-container-toolkit).

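For example, the images can be built for the `nvidia-container-runtime` workflow against a different pages repository (the repository value below is illustrative):

```bash
# Build the test images for the nvidia-container-runtime workflow,
# pointing at an alternative pages repository (value shown is an example).
make WORKFLOW=nvidia-container-runtime TEST_REPO=nvidia.github.io images
```
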
## Testing local package changes

Running:
```bash
make local-ubuntu18.04
```
will build the `ubuntu18.04-amd64` packages for release and launch a docker container with these packages added to a local APT repository.

The various `apt` workflows can then be tested as if the packages had been released to the `libnvidia-container` experimental repository.

The `local-centos8` make target is available for testing the `centos8-x86_64` workflows as representative of `yum`-based installation workflows. The underlying `docker run` invocation is sketched below.

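For reference, the `local-ubuntu18.04` target builds the packages and then runs the test image with the freshly built packages mounted; the run step is roughly equivalent to the following (paths are relative to the repository root):

```bash
# Rough equivalent of the run step performed by `make local-ubuntu18.04`:
# the built packages are mounted as the container's /local-repository.
docker run --rm -ti \
    -v "$(pwd)/dist/ubuntu18.04/amd64:/local-repository" \
    nvidia-container-toolkit-repo-test:ubuntu18.04
```
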
### Example

In the `centos8`-based container above we see the following `nvidia-docker2` packages available with the `local-repository` disabled:

```bash
$ yum list --showduplicates nvidia-docker2 | tail -2
nvidia-docker2.noarch        2.5.0-1        nvidia-docker
nvidia-docker2.noarch        2.6.0-1        nvidia-docker
```

Installing `nvidia-docker2`:
```bash
$ yum install -y nvidia-docker2
```

installs the following packages:
```bash
$ yum list installed | grep nvidia
libnvidia-container-tools.x86_64        1.5.1-1        @libnvidia-container
libnvidia-container1.x86_64             1.5.1-1        @libnvidia-container
nvidia-container-runtime.x86_64         3.5.0-1        @nvidia-container-runtime
nvidia-container-toolkit.x86_64         1.5.1-2        @nvidia-container-runtime
nvidia-docker2.noarch                   2.6.0-1        @nvidia-docker
```
Note the repositories that these packages were installed from.

We now enable the `local-repository` to simulate the new packages being published to the `libnvidia-container` experimental repository and check the available `nvidia-docker2` versions:
```bash
$ yum-config-manager --enable local-repository
$ yum list --showduplicates nvidia-docker2 | tail -2
nvidia-docker2.noarch        2.6.0-1            nvidia-docker
nvidia-docker2.noarch        2.6.1-0.1.rc.1     local-repository
```
This shows the new version available in the local repository.

Running:
```
$ yum install nvidia-docker2
Last metadata expiration check: 0:01:15 ago on Fri Sep 24 12:49:20 2021.
Package nvidia-docker2-2.6.0-1.noarch is already installed.
Dependencies resolved.
===============================================================================================================================
 Package                          Architecture    Version             Repository          Size
===============================================================================================================================
Upgrading:
 libnvidia-container-tools        x86_64          1.6.0-0.1.rc.1      local-repository    48 k
 libnvidia-container1             x86_64          1.6.0-0.1.rc.1      local-repository    95 k
 nvidia-container-toolkit         x86_64          1.6.0-0.1.rc.1      local-repository    1.5 M
     replacing  nvidia-container-runtime.x86_64 3.5.0-1
 nvidia-docker2                   noarch          2.6.1-0.1.rc.1      local-repository    13 k

Transaction Summary
===============================================================================================================================
Upgrade  4 Packages

Total size: 1.7 M
```
shows that all the components of the stack will be updated with versions from the `local-repository`.

After installation the installed packages are shown as:
```bash
$ yum list installed | grep nvidia
libnvidia-container-tools.x86_64        1.6.0-0.1.rc.1        @local-repository
libnvidia-container1.x86_64             1.6.0-0.1.rc.1        @local-repository
nvidia-container-toolkit.x86_64         1.6.0-0.1.rc.1        @local-repository
nvidia-docker2.noarch                   2.6.1-0.1.rc.1        @local-repository
```
This shows that:
1. All versions have been installed from the same repository
2. The `nvidia-container-runtime` package was removed, as it is no longer required.

The `nvidia-container-runtime` executable is, however, still present on the system:
```bash
# ls -l /usr/bin/nvidia-container-runtime
-rwxr-xr-x 1 root root 2256280 Sep 24 12:42 /usr/bin/nvidia-container-runtime
```

test/release/docker/centos8/Dockerfile (new file, 35 lines)
@ -0,0 +1,35 @@
FROM centos:8

RUN yum install -y \
    yum-utils \
    ruby-devel \
    gcc \
    make \
    rpm-build \
    rubygems \
    createrepo

RUN gem install --no-document fpm

# We create and install a dummy docker package since these dependencies are out of
# scope for the tests performed here.
RUN fpm -s empty \
    -t rpm \
    --description "A dummy package for docker-ce_18.06.3.ce-3.el7" \
    -n docker-ce --version 18.06.3.ce-3.el7 \
    -p /tmp/docker.rpm \
    && \
    yum localinstall -y /tmp/docker.rpm \
    && \
    rm -f /tmp/docker.rpm

ARG WORKFLOW=nvidia-docker
ARG TEST_REPO=nvidia.github.io
ENV TEST_REPO ${TEST_REPO}
RUN curl -s -L https://nvidia.github.io/${WORKFLOW}/centos8/nvidia-docker.repo \
    | tee /etc/yum.repos.d/nvidia-docker.repo

COPY entrypoint.sh /

ENTRYPOINT [ "/entrypoint.sh" ]

test/release/docker/centos8/entrypoint.sh (new executable file, 42 lines)
@ -0,0 +1,42 @@
#!/usr/bin/env bash

# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This entrypoint configures the package repositories for the test container.
# If local packages are mounted at /local-repository they are exposed through a
# local yum repository; otherwise the nvidia-docker repository definitions are
# pointed at the specified TEST_REPO.

: ${LOCAL_REPO_DIRECTORY:=/local-repository}
if [[ -d ${LOCAL_REPO_DIRECTORY} ]]; then
    echo "Setting up local-repository"
    createrepo /local-repository

    cat >/etc/yum.repos.d/local.repo <<EOL
[local-repository]
name=NVIDIA Container Toolkit Local Packages
baseurl=file:///local-repository
enabled=0
gpgcheck=0
protect=1
EOL
    yum-config-manager --enable local-repository
else
    echo "Setting up TEST repo: ${TEST_REPO}"
    # Use double quotes so that ${TEST_REPO} is expanded in the substitution.
    sed -i -e "s#nvidia\.github\.io/libnvidia-container#${TEST_REPO}/libnvidia-container#g" /etc/yum.repos.d/nvidia-docker.repo
    yum-config-manager --enable libnvidia-container-experimental
fi

exec bash $@

test/release/docker/ubuntu18.04/Dockerfile (new file, 50 lines)
@ -0,0 +1,50 @@
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

FROM ubuntu:18.04

ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install --no-install-recommends -y \
    curl \
    gnupg2 \
    apt-transport-https \
    ca-certificates \
    apt-utils \
    ruby ruby-dev rubygems build-essential

RUN gem install --no-document fpm

# We create and install a dummy docker package since these dependencies are out of
# scope for the tests performed here.
RUN fpm -s empty \
    -t deb \
    --description "A dummy package for docker.io_18.06.0" \
    -n docker.io --version 18.06.0 \
    -p /tmp/docker.deb \
    --deb-no-default-config-files \
    && \
    dpkg -i /tmp/docker.deb \
    && \
    rm -f /tmp/docker.deb

ARG WORKFLOW=nvidia-docker
RUN curl -s -L https://nvidia.github.io/${WORKFLOW}/gpgkey | apt-key add - \
    && curl -s -L https://nvidia.github.io/${WORKFLOW}/ubuntu18.04/nvidia-docker.list | tee /etc/apt/sources.list.d/nvidia-docker.list \
    && apt-get update

COPY entrypoint.sh /
COPY install_repo.sh /

ENTRYPOINT [ "/entrypoint.sh" ]

test/release/docker/ubuntu18.04/entrypoint.sh (new executable file, 34 lines)
@ -0,0 +1,34 @@
#!/usr/bin/env bash

# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This entrypoint configures the package repositories for the test container.
# If local packages are mounted at /local-repository they are exposed through a
# local APT repository; otherwise the repository definitions are pointed at the
# specified TEST_REPO via install_repo.sh.

: ${LOCAL_REPO_DIRECTORY:=/local-repository}
if [[ -d ${LOCAL_REPO_DIRECTORY} ]]; then
    echo "Setting up local-repository"
    echo "deb [trusted=yes] file:/local-repository ./" > /etc/apt/sources.list.d/local.list
    # Generate the Packages index for the local repository.
    (cd /local-repository && apt-ftparchive packages . > Packages)
elif [[ -n ${TEST_REPO} ]]; then
    ./install_repo.sh ${TEST_REPO}
else
    echo "Skipping repo setup"
fi
apt-get update

exec bash $@

test/release/docker/ubuntu18.04/install_repo.sh (new executable file, 25 lines)
@ -0,0 +1,25 @@
#!/usr/bin/env bash

# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script points the nvidia-docker apt repository definitions at the
# specified test repository and enables the experimental package lists.

test_repo=$1
echo "Setting up TEST repo: ${test_repo}"
sed -i -e "s#nvidia\.github\.io/libnvidia-container#${test_repo}/libnvidia-container#g" /etc/apt/sources.list.d/nvidia-docker.list
sed -i -e '/experimental/ s/^#//g' /etc/apt/sources.list.d/nvidia-docker.list

third_party/libnvidia-container (vendored submodule, new)
@ -0,0 +1 @@
Subproject commit d78f1f4afa7e8d809434e71cf717f0817a450b37

third_party/nvidia-container-runtime (vendored submodule, new)
@ -0,0 +1 @@
Subproject commit 2e721ab3b2202470e940fd5a29c1f681e761fbf0

third_party/nvidia-docker (vendored submodule, new)
@ -0,0 +1 @@
Subproject commit 52ed1032c80be535c44822f67151a4b0a0e72658