Compare commits


70 Commits

Author SHA1 Message Date
aptalca  7929311233  listen on both ipv4 and ipv6  2024-04-21 10:15:27 -04:00
LinuxServer-CI  eaeab3349e  Bot Updating Package Versions  2024-04-20 20:16:51 +00:00
LinuxServer-CI  ff420fc155  Bot Updating Package Versions  2024-04-16 01:38:25 +00:00
LinuxServer-CI  818d5ade66  Bot Updating Package Versions  2024-04-08 21:42:04 +00:00
LinuxServer-CI  1d46aa716e  Bot Updating Package Versions  2024-04-06 20:24:28 +00:00
LinuxServer-CI  17570939d5  Bot Updating Package Versions  2024-03-30 20:17:17 +00:00
LinuxServer-CI  0c5f0c4a76  Bot Updating Package Versions  2024-03-23 20:19:31 +00:00
LinuxServer-CI  73811e24e4  Bot Updating Package Versions  2024-03-14 20:44:29 +00:00
LinuxServer-CI  309d28852d  Bot Updating Templated Files  2024-03-14 20:41:23 +00:00
LinuxServer-CI  67773aa923  Bot Updating Package Versions  2024-03-09 20:15:27 +00:00
LinuxServer-CI  b18e8877ad  Bot Updating Package Versions  2024-03-03 23:42:53 +00:00
LinuxServer-CI  48262b8882  Bot Updating Package Versions  2024-03-02 20:16:44 +00:00
LinuxServer-CI  d2a5ef414a  Bot Updating Package Versions  2024-02-28 23:41:09 +00:00
LinuxServer-CI  4931dd3b82  Bot Updating Templated Files  2024-02-24 20:16:35 +00:00
LinuxServer-CI  2d7ee3529b  Bot Updating Templated Files  2024-02-24 20:14:53 +00:00
LinuxServer-CI  33e0d230c9  Bot Updating Package Versions  2024-02-09 22:41:54 +00:00
LinuxServer-CI  7d835391a9  Bot Updating Package Versions  2024-02-05 18:41:52 +00:00
LinuxServer-CI  3ce9c4e675  Bot Updating Package Versions  2024-02-03 20:19:18 +00:00
LinuxServer-CI  d6dbb82493  Bot Updating Package Versions  2024-01-27 20:16:19 +00:00
LinuxServer-CI  94e137e87f  Bot Updating Package Versions  2024-01-22 17:41:54 +00:00
LinuxServer-CI  a9df15473e  Bot Updating Package Versions  2024-01-20 20:18:35 +00:00
LinuxServer-CI  24919c86a2  Bot Updating Package Versions  2024-01-13 20:31:25 +00:00
LinuxServer-CI  a121f3a64a  Bot Updating Templated Files  2024-01-13 20:19:04 +00:00
LinuxServer-CI  33e35e89b8  Bot Updating Templated Files  2024-01-13 20:16:52 +00:00
LinuxServer-CI  5e7e1b2bda  Bot Updating Package Versions  2024-01-06 20:17:06 +00:00
LinuxServer-CI  4bfbc226a4  Bot Updating Package Versions  2023-12-21 22:40:42 +00:00
LinuxServer-CI  9c28be3eb4  Bot Updating Package Versions  2023-12-16 20:17:57 +00:00
LinuxServer-CI  dac826bc40  Bot Updating Package Versions  2023-12-09 20:17:51 +00:00
LinuxServer-CI  7e16b5c8be  Bot Updating Package Versions  2023-11-29 21:41:18 +00:00
LinuxServer-CI  14c35108cf  Bot Updating Package Versions  2023-11-25 20:17:26 +00:00
LinuxServer-CI  4dcc194840  Bot Updating Package Versions  2023-11-18 14:43:20 +00:00
LinuxServer-CI  71158be6b3  Bot Updating Templated Files  2023-11-18 14:39:32 +00:00
LinuxServer-CI  02a7bea1b8  Bot Updating Package Versions  2023-11-03 20:27:08 +00:00
LinuxServer-CI  b15152a498  Bot Updating Package Versions  2023-10-28 20:16:23 +00:00
LinuxServer-CI  18889ecd4b  Bot Updating Package Versions  2023-10-20 20:40:27 +00:00
LinuxServer-CI  44345df372  Bot Updating Package Versions  2023-10-14 20:16:34 +00:00
LinuxServer-CI  64a00a9744  Bot Updating Package Versions  2023-10-07 20:21:16 +00:00
LinuxServer-CI  31e2f77d49  Bot Updating Templated Files  2023-10-07 20:17:17 +00:00
LinuxServer-CI  b03eace8f8  Bot Updating Templated Files  2023-10-07 20:14:40 +00:00
LinuxServer-CI  7912c721a5  Bot Updating Package Versions  2023-09-29 23:40:28 +00:00
LinuxServer-CI  bccc62b708  Bot Updating Package Versions  2023-09-23 02:05:40 +00:00
LinuxServer-CI  25396b74d1  Bot Updating Package Versions  2023-09-16 20:16:07 +00:00
LinuxServer-CI  cf4c57cd1e  Bot Updating Package Versions  2023-08-26 20:15:10 +00:00
LinuxServer-CI  6d2cacc4c8  Bot Updating Package Versions  2023-08-19 20:16:29 +00:00
LinuxServer-CI  75e67677ed  Bot Updating Package Versions  2023-08-05 20:15:11 +00:00
LinuxServer-CI  246e06bac6  Bot Updating Package Versions  2023-07-31 19:40:18 +00:00
LinuxServer-CI  55c3ecff1c  Bot Updating Package Versions  2023-07-29 01:46:52 +00:00
LinuxServer-CI  7345ac6141  Bot Updating Package Versions  2023-07-21 23:41:37 +00:00
LinuxServer-CI  3de6bee22b  Bot Updating Package Versions  2023-07-01 20:23:54 +00:00
LinuxServer-CI  0ae77a3f39  Bot Updating Templated Files  2023-07-01 20:20:04 +00:00
LinuxServer-CI  b47e34134c  Bot Updating Templated Files  2023-07-01 20:18:40 +00:00
LinuxServer-CI  2f0fa20be3  Bot Updating Templated Files  2023-07-01 20:16:21 +00:00
LinuxServer-CI  e785f33bc1  Bot Updating Package Versions  2023-06-26 18:44:39 +00:00
LinuxServer-CI  d35b78301a  Bot Updating Package Versions  2023-06-16 21:41:41 +00:00
LinuxServer-CI  624f63de3b  Bot Updating Package Versions  2023-06-10 20:18:41 +00:00
LinuxServer-CI  8d3dd4a1b3  Bot Updating Package Versions  2023-06-03 20:16:54 +00:00
LinuxServer-CI  549e2e0208  Bot Updating Templated Files  2023-05-27 20:15:05 +00:00
LinuxServer-CI  8bc5a8902e  Bot Updating Package Versions  2023-05-19 21:40:38 +00:00
LinuxServer-CI  1119a25e4c  Bot Updating Package Versions  2023-05-06 20:15:55 +00:00
LinuxServer-CI  675ec78a74  Bot Updating Package Versions  2023-04-29 15:20:49 -05:00
LinuxServer-CI  16d7bd96af  Bot Updating Templated Files  2023-04-29 15:17:11 -05:00
LinuxServer-CI  66da21de61  Bot Updating Templated Files  2023-04-29 15:15:34 -05:00
LinuxServer-CI  b8bcdc44b7  Bot Updating Package Versions  2023-04-21 18:45:35 +00:00
LinuxServer-CI  c2f09a6bb3  Bot Updating Package Versions  2023-04-15 15:16:58 -05:00
LinuxServer-CI  2a54204888  Bot Updating Templated Files  2023-04-02 07:28:31 -05:00
LinuxServer-CI  c8a2ba1566  Bot Updating Templated Files  2023-04-02 07:26:58 -05:00
LinuxServer-CI  ae51d14720  Bot Updating Templated Files  2023-04-02 07:25:29 -05:00
LinuxServer-CI  cb8df4d4ef  Bot Updating Package Versions  2023-03-26 07:27:26 -05:00
LinuxServer-CI  8be39c9293  Bot Updating Package Versions  2023-03-16 19:49:00 +01:00
LinuxServer-CI  b83040e88f  Bot Updating Package Versions  2023-03-12 07:28:49 -05:00
16 changed files with 1012 additions and 1094 deletions


@@ -53,7 +53,6 @@ body:
       options:
         - x86-64
         - arm64
-        - armhf
     validations:
       required: true
   - type: textarea
@@ -68,10 +67,10 @@ body:
   - type: textarea
     attributes:
       description: |
-        Provide a full docker log, output of "docker logs linuxserver.io"
+        Provide a full docker log, output of "docker logs code-server"
      label: Container logs
      placeholder: |
-        Output of `docker logs linuxserver.io`
+        Output of `docker logs code-server`
      render: bash
    validations:
      required: true


@@ -1,12 +0,0 @@
-name: Comment on invalid interaction
-on:
-  issues:
-    types:
-      - labeled
-jobs:
-  add-comment-on-invalid:
-    if: github.event.label.name == 'invalid'
-    permissions:
-      issues: write
-    uses: linuxserver/github-workflows/.github/workflows/invalid-interaction-helper.yml@v1
-    secrets: inherit

.github/workflows/call_issue_pr_tracker.yml (vendored executable file, 16 lines added)

@@ -0,0 +1,16 @@
+name: Issue & PR Tracker
+on:
+  issues:
+    types: [opened,reopened,labeled,unlabeled,closed]
+  pull_request_target:
+    types: [opened,reopened,review_requested,review_request_removed,labeled,unlabeled,closed]
+  pull_request_review:
+    types: [submitted,edited,dismissed]
+jobs:
+  manage-project:
+    permissions:
+      issues: write
+    uses: linuxserver/github-workflows/.github/workflows/issue-pr-tracker.yml@v1
+    secrets: inherit

.github/workflows/call_issues_cron.yml (vendored executable file, 13 lines added)

@@ -0,0 +1,13 @@
+name: Mark stale issues and pull requests
+on:
+  schedule:
+    - cron: '31 1 * * *'
+  workflow_dispatch:
+jobs:
+  stale:
+    permissions:
+      issues: write
+      pull-requests: write
+    uses: linuxserver/github-workflows/.github/workflows/issues-cron.yml@v1
+    secrets: inherit


@@ -7,16 +7,18 @@ jobs:
   external-trigger-master:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3.1.0
+      - uses: actions/checkout@v4.1.1
       - name: External Trigger
         if: github.ref == 'refs/heads/master'
         run: |
           if [ -n "${{ secrets.PAUSE_EXTERNAL_TRIGGER_CODE_SERVER_MASTER }}" ]; then
             echo "**** Github secret PAUSE_EXTERNAL_TRIGGER_CODE_SERVER_MASTER is set; skipping trigger. ****"
+            echo "Github secret \`PAUSE_EXTERNAL_TRIGGER_CODE_SERVER_MASTER\` is set; skipping trigger." >> $GITHUB_STEP_SUMMARY
             exit 0
           fi
           echo "**** External trigger running off of master branch. To disable this trigger, set a Github secret named \"PAUSE_EXTERNAL_TRIGGER_CODE_SERVER_MASTER\". ****"
+          echo "External trigger running off of master branch. To disable this trigger, set a Github secret named \`PAUSE_EXTERNAL_TRIGGER_CODE_SERVER_MASTER\`" >> $GITHUB_STEP_SUMMARY
           echo "**** Retrieving external version ****"
           EXT_RELEASE=$(curl -u ${{ secrets.CR_USER }}:${{ secrets.CR_PAT }} -sX GET https://api.github.com/repos/coder/code-server/releases/latest | jq -r '.tag_name' | sed 's|^v||')
           if [ -z "${EXT_RELEASE}" ] || [ "${EXT_RELEASE}" == "null" ]; then
@@ -30,6 +32,7 @@ jobs:
           fi
           EXT_RELEASE=$(echo ${EXT_RELEASE} | sed 's/[~,%@+;:/]//g')
           echo "**** External version: ${EXT_RELEASE} ****"
+          echo "External version: ${EXT_RELEASE}" >> $GITHUB_STEP_SUMMARY
           echo "**** Retrieving last pushed version ****"
           image="linuxserver/code-server"
           tag="latest"
@@ -65,14 +68,18 @@ jobs:
             exit 1
           fi
           echo "**** Last pushed version: ${IMAGE_VERSION} ****"
+          echo "Last pushed version: ${IMAGE_VERSION}" >> $GITHUB_STEP_SUMMARY
           if [ "${EXT_RELEASE}" == "${IMAGE_VERSION}" ]; then
             echo "**** Version ${EXT_RELEASE} already pushed, exiting ****"
+            echo "Version ${EXT_RELEASE} already pushed, exiting" >> $GITHUB_STEP_SUMMARY
             exit 0
           elif [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-code-server/job/master/lastBuild/api/json | jq -r '.building') == "true" ]; then
             echo "**** New version ${EXT_RELEASE} found; but there already seems to be an active build on Jenkins; exiting ****"
+            echo "New version ${EXT_RELEASE} found; but there already seems to be an active build on Jenkins; exiting" >> $GITHUB_STEP_SUMMARY
             exit 0
           else
             echo "**** New version ${EXT_RELEASE} found; old version was ${IMAGE_VERSION}. Triggering new build ****"
+            echo "New version ${EXT_RELEASE} found; old version was ${IMAGE_VERSION}. Triggering new build" >> $GITHUB_STEP_SUMMARY
             response=$(curl -iX POST \
             https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-code-server/job/master/buildWithParameters?PACKAGE_CHECK=false \
             --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
@@ -82,6 +89,7 @@ jobs:
           buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
           buildurl="${buildurl%$'\r'}"
           echo "**** Jenkins job build url: ${buildurl} ****"
+          echo "Jenkins job build url: ${buildurl}" >> $GITHUB_STEP_SUMMARY
           echo "**** Attempting to change the Jenkins job description ****"
           curl -iX POST \
           "${buildurl}submitDescription" \


@@ -2,14 +2,14 @@ name: External Trigger Scheduler
 on:
   schedule:
-    - cron: '41 * * * *'
+    - cron: '32 * * * *'
   workflow_dispatch:
 jobs:
   external-trigger-scheduler:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3.1.0
+      - uses: actions/checkout@v4.1.1
       with:
         fetch-depth: '0'
@@ -17,18 +17,18 @@ jobs:
       run: |
         echo "**** Branches found: ****"
         git for-each-ref --format='%(refname:short)' refs/remotes
-        echo "**** Pulling the yq docker image ****"
-        docker pull ghcr.io/linuxserver/yq
         for br in $(git for-each-ref --format='%(refname:short)' refs/remotes)
         do
           br=$(echo "$br" | sed 's|origin/||g')
           echo "**** Evaluating branch ${br} ****"
-          ls_branch=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-code-server/${br}/jenkins-vars.yml \
-            | docker run --rm -i --entrypoint yq ghcr.io/linuxserver/yq -r .ls_branch)
-          if [ "$br" == "$ls_branch" ]; then
-            echo "**** Branch ${br} appears to be live; checking workflow. ****"
+          ls_jenkins_vars=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-code-server/${br}/jenkins-vars.yml)
+          ls_branch=$(echo "${ls_jenkins_vars}" | yq -r '.ls_branch')
+          ls_trigger=$(echo "${ls_jenkins_vars}" | yq -r '.external_type')
+          if [[ "${br}" == "${ls_branch}" ]] && [[ "${ls_trigger}" != "os" ]]; then
+            echo "**** Branch ${br} appears to be live and trigger is not os; checking workflow. ****"
            if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-code-server/${br}/.github/workflows/external_trigger.yml > /dev/null 2>&1; then
              echo "**** Workflow exists. Triggering external trigger workflow for branch ${br} ****."
+              echo "Triggering external trigger workflow for branch ${br}" >> $GITHUB_STEP_SUMMARY
              curl -iX POST \
              -H "Authorization: token ${{ secrets.CR_PAT }}" \
              -H "Accept: application/vnd.github.v3+json" \
@@ -36,8 +36,10 @@ jobs:
              https://api.github.com/repos/linuxserver/docker-code-server/actions/workflows/external_trigger.yml/dispatches
            else
              echo "**** Workflow doesn't exist; skipping trigger. ****"
+              echo "Skipping branch ${br} due to no external trigger workflow present." >> $GITHUB_STEP_SUMMARY
            fi
          else
-            echo "**** ${br} appears to be a dev branch; skipping trigger. ****"
+            echo "**** ${br} is either a dev branch, or has no external version; skipping trigger. ****"
+            echo "Skipping branch ${br} due to being detected as dev branch or having no external version." >> $GITHUB_STEP_SUMMARY
          fi
        done
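
The reworked scheduler above drops the dockerized ghcr.io/linuxserver/yq image and parses jenkins-vars.yml with a plain yq binary instead. A minimal standalone sketch of that per-branch lookup, assuming a hypothetical branch named master and illustrative key values (not taken from the repo):

    # Sketch only: fetch jenkins-vars.yml once, then pull the two keys the workflow inspects.
    ls_jenkins_vars=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-code-server/master/jenkins-vars.yml)
    ls_branch=$(echo "${ls_jenkins_vars}" | yq -r '.ls_branch')      # live branch name, e.g. "master" (illustrative)
    ls_trigger=$(echo "${ls_jenkins_vars}" | yq -r '.external_type') # e.g. "github_stable" (illustrative); branches reporting "os" are skipped
    if [[ "master" == "${ls_branch}" ]] && [[ "${ls_trigger}" != "os" ]]; then
        echo "Live, externally versioned branch; the external_trigger workflow would be dispatched."
    fi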


@@ -7,20 +7,23 @@ jobs:
   package-trigger-master:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3.1.0
+      - uses: actions/checkout@v4.1.1
       - name: Package Trigger
         if: github.ref == 'refs/heads/master'
         run: |
           if [ -n "${{ secrets.PAUSE_PACKAGE_TRIGGER_CODE_SERVER_MASTER }}" ]; then
             echo "**** Github secret PAUSE_PACKAGE_TRIGGER_CODE_SERVER_MASTER is set; skipping trigger. ****"
+            echo "Github secret \`PAUSE_PACKAGE_TRIGGER_CODE_SERVER_MASTER\` is set; skipping trigger." >> $GITHUB_STEP_SUMMARY
             exit 0
           fi
           if [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-code-server/job/master/lastBuild/api/json | jq -r '.building') == "true" ]; then
             echo "**** There already seems to be an active build on Jenkins; skipping package trigger ****"
+            echo "There already seems to be an active build on Jenkins; skipping package trigger" >> $GITHUB_STEP_SUMMARY
             exit 0
           fi
           echo "**** Package trigger running off of master branch. To disable, set a Github secret named \"PAUSE_PACKAGE_TRIGGER_CODE_SERVER_MASTER\". ****"
+          echo "Package trigger running off of master branch. To disable, set a Github secret named \`PAUSE_PACKAGE_TRIGGER_CODE_SERVER_MASTER\`" >> $GITHUB_STEP_SUMMARY
           response=$(curl -iX POST \
           https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-code-server/job/master/buildWithParameters?PACKAGE_CHECK=true \
           --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
@@ -30,6 +33,7 @@ jobs:
           buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
           buildurl="${buildurl%$'\r'}"
           echo "**** Jenkins job build url: ${buildurl} ****"
+          echo "Jenkins job build url: ${buildurl}" >> $GITHUB_STEP_SUMMARY
           echo "**** Attempting to change the Jenkins job description ****"
           curl -iX POST \
           "${buildurl}submitDescription" \


@@ -2,14 +2,14 @@ name: Package Trigger Scheduler
 on:
   schedule:
-    - cron: '00 12 * * 0'
+    - cron: '0 20 * * 6'
   workflow_dispatch:
 jobs:
   package-trigger-scheduler:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3.1.0
+      - uses: actions/checkout@v4.1.1
       with:
         fetch-depth: '0'
@@ -17,18 +17,16 @@ jobs:
       run: |
         echo "**** Branches found: ****"
         git for-each-ref --format='%(refname:short)' refs/remotes
-        echo "**** Pulling the yq docker image ****"
-        docker pull ghcr.io/linuxserver/yq
        for br in $(git for-each-ref --format='%(refname:short)' refs/remotes)
        do
          br=$(echo "$br" | sed 's|origin/||g')
          echo "**** Evaluating branch ${br} ****"
-          ls_branch=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-code-server/${br}/jenkins-vars.yml \
-            | docker run --rm -i --entrypoint yq ghcr.io/linuxserver/yq -r .ls_branch)
+          ls_branch=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-code-server/${br}/jenkins-vars.yml | yq -r '.ls_branch')
          if [ "${br}" == "${ls_branch}" ]; then
            echo "**** Branch ${br} appears to be live; checking workflow. ****"
            if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-code-server/${br}/.github/workflows/package_trigger.yml > /dev/null 2>&1; then
              echo "**** Workflow exists. Triggering package trigger workflow for branch ${br}. ****"
+              echo "Triggering package trigger workflow for branch ${br}" >> $GITHUB_STEP_SUMMARY
              triggered_branches="${triggered_branches}${br} "
              curl -iX POST \
              -H "Authorization: token ${{ secrets.CR_PAT }}" \
@@ -38,9 +36,11 @@ jobs:
              sleep 30
            else
              echo "**** Workflow doesn't exist; skipping trigger. ****"
+              echo "Skipping branch ${br} due to no package trigger workflow present." >> $GITHUB_STEP_SUMMARY
            fi
          else
            echo "**** ${br} appears to be a dev branch; skipping trigger. ****"
+            echo "Skipping branch ${br} due to being detected as dev branch." >> $GITHUB_STEP_SUMMARY
          fi
        done
        echo "**** Package check build(s) triggered for branch(es): ${triggered_branches} ****"


@@ -1,9 +1,10 @@
 name: Permission check
 on:
-  pull_request:
+  pull_request_target:
     paths:
       - '**/run'
       - '**/finish'
+      - '**/check'
 jobs:
   permission_check:
     uses: linuxserver/github-workflows/.github/workflows/init-svc-executable-permissions.yml@v1


@@ -1,23 +0,0 @@
-name: Mark stale issues and pull requests
-on:
-  schedule:
-    - cron: "30 1 * * *"
-jobs:
-  stale:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/stale@v6.0.1
-        with:
-          stale-issue-message: "This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions."
-          stale-pr-message: "This pull request has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions."
-          stale-issue-label: 'no-issue-activity'
-          stale-pr-label: 'no-pr-activity'
-          days-before-stale: 30
-          days-before-close: 365
-          exempt-issue-labels: 'awaiting-approval,work-in-progress'
-          exempt-pr-labels: 'awaiting-approval,work-in-progress'
-          repo-token: ${{ secrets.GITHUB_TOKEN }}


@@ -1,48 +0,0 @@
-FROM ghcr.io/linuxserver/baseimage-ubuntu:arm32v7-jammy
-# set version label
-ARG BUILD_DATE
-ARG VERSION
-ARG CODE_RELEASE
-LABEL build_version="Linuxserver.io version:- ${VERSION} Build-date:- ${BUILD_DATE}"
-LABEL maintainer="aptalca"
-# environment settings
-ARG DEBIAN_FRONTEND="noninteractive"
-ENV HOME="/config"
-RUN \
-  echo "**** install runtime dependencies ****" && \
-  apt-get update && \
-  apt-get install -y \
-    git \
-    jq \
-    libatomic1 \
-    nano \
-    net-tools \
-    netcat \
-    sudo && \
-  echo "**** install code-server ****" && \
-  if [ -z ${CODE_RELEASE+x} ]; then \
-    CODE_RELEASE=$(curl -sX GET https://api.github.com/repos/coder/code-server/releases/latest \
-      | awk '/tag_name/{print $4;exit}' FS='[""]' | sed 's|^v||'); \
-  fi && \
-  mkdir -p /app/code-server && \
-  curl -o \
-    /tmp/code-server.tar.gz -L \
-    "https://github.com/coder/code-server/releases/download/v${CODE_RELEASE}/code-server-${CODE_RELEASE}-linux-armv7l.tar.gz" && \
-  tar xf /tmp/code-server.tar.gz -C \
-    /app/code-server --strip-components=1 && \
-  echo "**** clean up ****" && \
-  apt-get clean && \
-  rm -rf \
-    /config/* \
-    /tmp/* \
-    /var/lib/apt/lists/* \
-    /var/tmp/*
-# add local files
-COPY /root /
-# ports and volumes
-EXPOSE 8443

Jenkinsfile (vendored, 508 lines changed)

@@ -16,7 +16,7 @@ pipeline {
    GITHUB_TOKEN=credentials('498b4638-2d02-4ce5-832d-8a57d01d97ab')
    GITLAB_TOKEN=credentials('b6f0f1dd-6952-4cf6-95d1-9c06380283f0')
    GITLAB_NAMESPACE=credentials('gitlab-namespace-id')
-    SCARF_TOKEN=credentials('scarf_api_key')
+    DOCKERHUB_TOKEN=credentials('docker-hub-ci-pat')
    CONTAINER_NAME = 'code-server'
    BUILD_VERSION_ARG = 'CODE_RELEASE'
    LS_USER = 'linuxserver'
@@ -39,10 +39,16 @@ pipeline {
    // Setup all the basic environment variables needed for the build
    stage("Set ENV Variables base"){
      steps{
+        sh '''#! /bin/bash
+          containers=$(docker ps -aq)
+          if [[ -n "${containers}" ]]; then
+            docker stop ${containers}
+          fi
+          docker system prune -af --volumes || : '''
        script{
          env.EXIT_STATUS = ''
          env.LS_RELEASE = sh(
-            script: '''docker run --rm ghcr.io/linuxserver/alexeiled-skopeo sh -c 'skopeo inspect docker://docker.io/'${DOCKERHUB_IMAGE}':latest 2>/dev/null' | jq -r '.Labels.build_version' | awk '{print $3}' | grep '\\-ls' || : ''',
+            script: '''docker run --rm quay.io/skopeo/stable:v1 inspect docker://ghcr.io/${LS_USER}/${CONTAINER_NAME}:latest 2>/dev/null | jq -r '.Labels.build_version' | awk '{print $3}' | grep '\\-ls' || : ''',
            returnStdout: true).trim()
          env.LS_RELEASE_NOTES = sh(
            script: '''cat readme-vars.yml | awk -F \\" '/date: "[0-9][0-9].[0-9][0-9].[0-9][0-9]:/ {print $4;exit;}' | sed -E ':a;N;$!ba;s/\\r{0,1}\\n/\\\\n/g' ''',
@@ -53,11 +59,16 @@ pipeline {
          env.COMMIT_SHA = sh(
            script: '''git rev-parse HEAD''',
            returnStdout: true).trim()
+          env.GH_DEFAULT_BRANCH = sh(
+            script: '''git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||' ''',
+            returnStdout: true).trim()
          env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/commit/' + env.GIT_COMMIT
          env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DOCKERHUB_IMAGE + '/tags/'
          env.PULL_REQUEST = env.CHANGE_ID
-          env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/stale.yml ./.github/workflows/call_invalid_helper.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml ./.github/workflows/package_trigger.yml'
+          env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/call_issue_pr_tracker.yml ./.github/workflows/call_issues_cron.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml ./.github/workflows/package_trigger.yml'
        }
+        sh '''#! /bin/bash
+          echo "The default github branch detected as ${GH_DEFAULT_BRANCH}" '''
        script{
          env.LS_RELEASE_NUMBER = sh(
            script: '''echo ${LS_RELEASE} |sed 's/^.*-ls//g' ''',
@@ -115,7 +126,7 @@ pipeline {
      steps{
        script{
          env.EXT_RELEASE_CLEAN = sh(
-            script: '''echo ${EXT_RELEASE} | sed 's/[~,%@+;:/]//g' ''',
+            script: '''echo ${EXT_RELEASE} | sed 's/[~,%@+;:/ ]//g' ''',
            returnStdout: true).trim()
          def semver = env.EXT_RELEASE_CLEAN =~ /(\d+)\.(\d+)\.(\d+)/
@@ -133,7 +144,7 @@ pipeline {
          }
          if (env.SEMVER != null) {
-            if (BRANCH_NAME != "master" && BRANCH_NAME != "main") {
+            if (BRANCH_NAME != "${env.GH_DEFAULT_BRANCH}") {
              env.SEMVER = "${env.SEMVER}-${BRANCH_NAME}"
            }
            println("SEMVER: ${env.SEMVER}")
@@ -157,7 +168,7 @@ pipeline {
          env.GITLABIMAGE = 'registry.gitlab.com/linuxserver.io/' + env.LS_REPO + '/' + env.CONTAINER_NAME
          env.QUAYIMAGE = 'quay.io/linuxserver.io/' + env.CONTAINER_NAME
          if (env.MULTIARCH == 'true') {
-            env.CI_TAGS = 'amd64-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER + '|arm32v7-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER + '|arm64v8-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
+            env.CI_TAGS = 'amd64-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER + '|arm64v8-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
          } else {
            env.CI_TAGS = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
          }
@@ -180,7 +191,7 @@ pipeline {
          env.GITLABIMAGE = 'registry.gitlab.com/linuxserver.io/' + env.LS_REPO + '/lsiodev-' + env.CONTAINER_NAME
          env.QUAYIMAGE = 'quay.io/linuxserver.io/lsiodev-' + env.CONTAINER_NAME
          if (env.MULTIARCH == 'true') {
-            env.CI_TAGS = 'amd64-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '|arm32v7-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '|arm64v8-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
+            env.CI_TAGS = 'amd64-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '|arm64v8-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
          } else {
            env.CI_TAGS = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
          }
@@ -203,12 +214,12 @@ pipeline {
          env.GITLABIMAGE = 'registry.gitlab.com/linuxserver.io/' + env.LS_REPO + '/lspipepr-' + env.CONTAINER_NAME
          env.QUAYIMAGE = 'quay.io/linuxserver.io/lspipepr-' + env.CONTAINER_NAME
          if (env.MULTIARCH == 'true') {
-            env.CI_TAGS = 'amd64-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-pr-' + env.PULL_REQUEST + '|arm32v7-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-pr-' + env.PULL_REQUEST + '|arm64v8-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-pr-' + env.PULL_REQUEST
+            env.CI_TAGS = 'amd64-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST + '|arm64v8-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
          } else {
-            env.CI_TAGS = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-pr-' + env.PULL_REQUEST
+            env.CI_TAGS = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
          }
-          env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-pr-' + env.PULL_REQUEST
-          env.META_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-pr-' + env.PULL_REQUEST
+          env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
+          env.META_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
          env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
          env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/pull/' + env.PULL_REQUEST
          env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.PR_DOCKERHUB_IMAGE + '/tags/'
@@ -228,19 +239,18 @@ pipeline {
        script{
          env.SHELLCHECK_URL = 'https://ci-tests.linuxserver.io/' + env.IMAGE + '/' + env.META_TAG + '/shellcheck-result.xml'
        }
-        sh '''curl -sL https://raw.githubusercontent.com/linuxserver/docker-shellcheck/master/checkrun.sh | /bin/bash'''
+        sh '''curl -sL https://raw.githubusercontent.com/linuxserver/docker-jenkins-builder/master/checkrun.sh | /bin/bash'''
        sh '''#! /bin/bash
-          set -e
-          docker pull ghcr.io/linuxserver/lsiodev-spaces-file-upload:latest
          docker run --rm \
-          -e DESTINATION=\"${IMAGE}/${META_TAG}/shellcheck-result.xml\" \
-          -e FILE_NAME="shellcheck-result.xml" \
-          -e MIMETYPE="text/xml" \
-          -v ${WORKSPACE}:/mnt \
-          -e SECRET_KEY=\"${S3_SECRET}\" \
-          -e ACCESS_KEY=\"${S3_KEY}\" \
-          -t ghcr.io/linuxserver/lsiodev-spaces-file-upload:latest \
-          python /upload.py'''
+          -v ${WORKSPACE}:/mnt \
+          -e AWS_ACCESS_KEY_ID=\"${S3_KEY}\" \
+          -e AWS_SECRET_ACCESS_KEY=\"${S3_SECRET}\" \
+          ghcr.io/linuxserver/baseimage-alpine:3.19 s6-envdir -fn -- /var/run/s6/container_environment /bin/bash -c "\
+            apk add --no-cache python3 && \
+            python3 -m venv /lsiopy && \
+            pip install --no-cache-dir -U pip && \
+            pip install --no-cache-dir s3cmd && \
+            s3cmd put --no-preserve --acl-public -m text/xml /mnt/shellcheck-result.xml s3://ci-tests.linuxserver.io/${IMAGE}/${META_TAG}/shellcheck-result.xml" || :'''
      }
    }
  }
@@ -258,8 +268,15 @@ pipeline {
          set -e
          TEMPDIR=$(mktemp -d)
          docker pull ghcr.io/linuxserver/jenkins-builder:latest
-          docker run --rm -e CONTAINER_NAME=${CONTAINER_NAME} -e GITHUB_BRANCH=master -v ${TEMPDIR}:/ansible/jenkins ghcr.io/linuxserver/jenkins-builder:latest
-          # Stage 1 - Jenkinsfile update
+          # Cloned repo paths for templating:
+          # ${TEMPDIR}/docker-${CONTAINER_NAME}: Cloned branch master of ${LS_USER}/${LS_REPO} for running the jenkins builder on
+          # ${TEMPDIR}/repo/${LS_REPO}: Cloned branch master of ${LS_USER}/${LS_REPO} for commiting various templated file changes and pushing back to Github
+          # ${TEMPDIR}/docs/docker-documentation: Cloned docs repo for pushing docs updates to Github
+          # ${TEMPDIR}/unraid/docker-templates: Cloned docker-templates repo to check for logos
+          # ${TEMPDIR}/unraid/templates: Cloned templates repo for commiting unraid template changes and pushing back to Github
+          git clone --branch master --depth 1 https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/docker-${CONTAINER_NAME}
+          docker run --rm -v ${TEMPDIR}/docker-${CONTAINER_NAME}:/tmp -e LOCAL=true ghcr.io/linuxserver/jenkins-builder:latest
+          echo "Starting Stage 1 - Jenkinsfile update"
          if [[ "$(md5sum Jenkinsfile | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/Jenkinsfile | awk '{ print $1 }')" ]]; then
            mkdir -p ${TEMPDIR}/repo
            git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
@@ -268,16 +285,17 @@ pipeline {
            cp ${TEMPDIR}/docker-${CONTAINER_NAME}/Jenkinsfile ${TEMPDIR}/repo/${LS_REPO}/
            git add Jenkinsfile
            git commit -m 'Bot Updating Templated Files'
-            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
+            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
+            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
            echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
-            echo "Updating Jenkinsfile"
+            echo "Updating Jenkinsfile and exiting build, new one will trigger based on commit"
            rm -Rf ${TEMPDIR}
            exit 0
          else
            echo "Jenkinsfile is up to date."
          fi
-          # Stage 2 - Delete old templates
-          OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md\n.github/ISSUE_TEMPLATE/issue.bug.md\n.github/ISSUE_TEMPLATE/issue.feature.md"
+          echo "Starting Stage 2 - Delete old templates"
+          OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md .github/ISSUE_TEMPLATE/issue.bug.md .github/ISSUE_TEMPLATE/issue.feature.md .github/workflows/call_invalid_helper.yml .github/workflows/stale.yml Dockerfile.armhf"
          for i in ${OLD_TEMPLATES}; do
            if [[ -f "${i}" ]]; then
              TEMPLATES_TO_DELETE="${i} ${TEMPLATES_TO_DELETE}"
@@ -292,15 +310,16 @@ pipeline {
              git rm "${i}"
            done
            git commit -m 'Bot Updating Templated Files'
-            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
+            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
+            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
            echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
-            echo "Deleting old templates"
+            echo "Deleting old/deprecated templates and exiting build, new one will trigger based on commit"
            rm -Rf ${TEMPDIR}
            exit 0
          else
            echo "No templates to delete"
          fi
-          # Stage 3 - Update templates
+          echo "Starting Stage 3 - Update templates"
          CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
          cd ${TEMPDIR}/docker-${CONTAINER_NAME}
          NEWHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
@@ -313,35 +332,55 @@ pipeline {
            mkdir -p ${TEMPDIR}/repo/${LS_REPO}/.github/workflows
            mkdir -p ${TEMPDIR}/repo/${LS_REPO}/.github/ISSUE_TEMPLATE
            cp --parents ${TEMPLATED_FILES} ${TEMPDIR}/repo/${LS_REPO}/ || :
+            cp --parents readme-vars.yml ${TEMPDIR}/repo/${LS_REPO}/ || :
            cd ${TEMPDIR}/repo/${LS_REPO}/
            if ! grep -q '.jenkins-external' .gitignore 2>/dev/null; then
              echo ".jenkins-external" >> .gitignore
              git add .gitignore
            fi
-            git add ${TEMPLATED_FILES}
+            git add readme-vars.yml ${TEMPLATED_FILES}
            git commit -m 'Bot Updating Templated Files'
-            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
+            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
+            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
            echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
+            echo "Updating templates and exiting build, new one will trigger based on commit"
+            rm -Rf ${TEMPDIR}
+            exit 0
          else
            echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
+            echo "No templates to update"
          fi
-          mkdir -p ${TEMPDIR}/gitbook
-          git clone https://github.com/linuxserver/docker-documentation.git ${TEMPDIR}/gitbook/docker-documentation
-          if [[ ("${BRANCH_NAME}" == "master") || ("${BRANCH_NAME}" == "main") ]] && [[ (! -f ${TEMPDIR}/gitbook/docker-documentation/images/docker-${CONTAINER_NAME}.md) || ("$(md5sum ${TEMPDIR}/gitbook/docker-documentation/images/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')") ]]; then
-            cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md ${TEMPDIR}/gitbook/docker-documentation/images/
-            cd ${TEMPDIR}/gitbook/docker-documentation/
-            git add images/docker-${CONTAINER_NAME}.md
+          echo "Starting Stage 4 - External repo updates: Docs, Unraid Template and Readme Sync to Docker Hub"
+          mkdir -p ${TEMPDIR}/docs
+          git clone https://github.com/linuxserver/docker-documentation.git ${TEMPDIR}/docs/docker-documentation
+          if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]] && [[ (! -f ${TEMPDIR}/docs/docker-documentation/docs/images/docker-${CONTAINER_NAME}.md) || ("$(md5sum ${TEMPDIR}/docs/docker-documentation/docs/images/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')") ]]; then
+            cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md ${TEMPDIR}/docs/docker-documentation/docs/images/
+            cd ${TEMPDIR}/docs/docker-documentation
+            GH_DOCS_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||')
+            git add docs/images/docker-${CONTAINER_NAME}.md
+            echo "Updating docs repo"
            git commit -m 'Bot Updating Documentation'
-            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git --all
+            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} --rebase
+            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} || \
+              (MAXWAIT="10" && echo "Push to docs failed, trying again in ${MAXWAIT} seconds" && \
+              sleep $((RANDOM % MAXWAIT)) && \
+              git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} --rebase && \
+              git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH})
+          else
+            echo "Docs update not needed, skipping"
          fi
          mkdir -p ${TEMPDIR}/unraid
          git clone https://github.com/linuxserver/docker-templates.git ${TEMPDIR}/unraid/docker-templates
          git clone https://github.com/linuxserver/templates.git ${TEMPDIR}/unraid/templates
          if [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-logo.png ]]; then
            sed -i "s|master/linuxserver.io/img/linuxserver-ls-logo.png|master/linuxserver.io/img/${CONTAINER_NAME}-logo.png|" ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml
+          elif [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-icon.png ]]; then
+            sed -i "s|master/linuxserver.io/img/linuxserver-ls-logo.png|master/linuxserver.io/img/${CONTAINER_NAME}-icon.png|" ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml
          fi
-          if [[ ("${BRANCH_NAME}" == "master") || ("${BRANCH_NAME}" == "main") ]] && [[ (! -f ${TEMPDIR}/unraid/templates/unraid/${CONTAINER_NAME}.xml) || ("$(md5sum ${TEMPDIR}/unraid/templates/unraid/${CONTAINER_NAME}.xml | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml | awk '{ print $1 }')") ]]; then
+          if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]] && [[ (! -f ${TEMPDIR}/unraid/templates/unraid/${CONTAINER_NAME}.xml) || ("$(md5sum ${TEMPDIR}/unraid/templates/unraid/${CONTAINER_NAME}.xml | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml | awk '{ print $1 }')") ]]; then
+            echo "Updating Unraid template"
            cd ${TEMPDIR}/unraid/templates/
+            GH_TEMPLATES_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||')
            if grep -wq "${CONTAINER_NAME}" ${TEMPDIR}/unraid/templates/unraid/ignore.list; then
              echo "Image is on the ignore list, marking Unraid template as deprecated"
              cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml ${TEMPDIR}/unraid/templates/unraid/
@@ -353,7 +392,42 @@ pipeline {
              git add unraid/${CONTAINER_NAME}.xml
              git commit -m 'Bot Updating Unraid Template'
            fi
-            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git --all
+            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH} --rebase
+            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH} || \
+              (MAXWAIT="10" && echo "Push to unraid templates failed, trying again in ${MAXWAIT} seconds" && \
+              sleep $((RANDOM % MAXWAIT)) && \
+              git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH} --rebase && \
+              git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH})
+          else
+            echo "No updates to Unraid template needed, skipping"
+          fi
+          if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]]; then
+            if [[ $(cat ${TEMPDIR}/docker-${CONTAINER_NAME}/README.md | wc -m) -gt 25000 ]]; then
+              echo "Readme is longer than 25,000 characters. Syncing the lite version to Docker Hub"
+              DH_README_SYNC_PATH="${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/README.lite"
+            else
+              echo "Syncing readme to Docker Hub"
+              DH_README_SYNC_PATH="${TEMPDIR}/docker-${CONTAINER_NAME}/README.md"
+            fi
+            if curl -s https://hub.docker.com/v2/namespaces/${DOCKERHUB_IMAGE%%/*}/repositories/${DOCKERHUB_IMAGE##*/}/tags | jq -r '.message' | grep -q 404; then
+              echo "Docker Hub endpoint doesn't exist. Creating endpoint first."
+              DH_TOKEN=$(curl -d '{"username":"linuxserverci", "password":"'${DOCKERHUB_TOKEN}'"}' -H "Content-Type: application/json" -X POST https://hub.docker.com/v2/users/login | jq -r '.token')
+              curl -s \
+                -H "Authorization: JWT ${DH_TOKEN}" \
+                -H "Content-Type: application/json" \
+                -X POST \
+                -d '{"name":"'${DOCKERHUB_IMAGE##*/}'", "namespace":"'${DOCKERHUB_IMAGE%%/*}'"}' \
+                https://hub.docker.com/v2/repositories/ || :
+            fi
+            DH_TOKEN=$(curl -d '{"username":"linuxserverci", "password":"'${DOCKERHUB_TOKEN}'"}' -H "Content-Type: application/json" -X POST https://hub.docker.com/v2/users/login | jq -r '.token')
+            curl -s \
+              -H "Authorization: JWT ${DH_TOKEN}" \
+              -H "Content-Type: application/json" \
+              -X PATCH \
+              -d "{\\"full_description\\":$(jq -Rsa . ${DH_README_SYNC_PATH})}" \
+              https://hub.docker.com/v2/repositories/${DOCKERHUB_IMAGE} || :
+          else
+            echo "Not the default Github branch. Skipping readme sync to Docker Hub."
          fi
          rm -Rf ${TEMPDIR}'''
        script{
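
The new Stage 4 logic above also syncs the image readme to Docker Hub. Pulled out of the hunk for readability, a minimal sketch of that API exchange under the same assumptions the pipeline makes (a Docker Hub token in DOCKERHUB_TOKEN, the linuxserver/code-server repository, a local README.md; retries and the endpoint-creation branch omitted):

    # Sketch only: log in for a JWT, then PATCH the repository's full_description.
    DH_TOKEN=$(curl -d '{"username":"linuxserverci", "password":"'${DOCKERHUB_TOKEN}'"}' \
        -H "Content-Type: application/json" -X POST https://hub.docker.com/v2/users/login | jq -r '.token')
    curl -s \
        -H "Authorization: JWT ${DH_TOKEN}" \
        -H "Content-Type: application/json" \
        -X PATCH \
        -d "{\"full_description\":$(jq -Rsa . README.md)}" \
        https://hub.docker.com/v2/repositories/linuxserver/code-server || :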
@@ -379,6 +453,26 @@ pipeline {
        }
      }
    }
+    // If this is a master build check the S6 service file perms
+    stage("Check S6 Service file Permissions"){
+      when {
+        branch "master"
+        environment name: 'CHANGE_ID', value: ''
+        environment name: 'EXIT_STATUS', value: ''
+      }
+      steps {
+        script{
+          sh '''#! /bin/bash
+            WRONG_PERM=$(find ./ -path "./.git" -prune -o \\( -name "run" -o -name "finish" -o -name "check" \\) -not -perm -u=x,g=x,o=x -print)
+            if [[ -n "${WRONG_PERM}" ]]; then
+              echo "The following S6 service files are missing the executable bit; canceling the faulty build: ${WRONG_PERM}"
+              exit 1
+            else
+              echo "S6 service file perms look good."
+            fi '''
+        }
+      }
+    }
    /* #######################
       GitLab Mirroring
       ####################### */
@@ -389,44 +483,16 @@ pipeline {
      }
      steps{
        sh '''curl -H "Content-Type: application/json" -H "Private-Token: ${GITLAB_TOKEN}" -X POST https://gitlab.com/api/v4/projects \
        -d '{"namespace_id":'${GITLAB_NAMESPACE}',\
        "name":"'${LS_REPO}'",
        "mirror":true,\
        "import_url":"https://github.com/linuxserver/'${LS_REPO}'.git",\
        "issues_access_level":"disabled",\
        "merge_requests_access_level":"disabled",\
        "repository_access_level":"enabled",\
        "visibility":"public"}' '''
-      }
-    }
-    /* #######################
-       Scarf.sh package registry
-       ####################### */
-    // Add package to Scarf.sh and set permissions
-    stage("Scarf.sh package registry"){
-      when {
-        branch "master"
-        environment name: 'EXIT_STATUS', value: ''
-      }
-      steps{
-        sh '''#! /bin/bash
-          set -e
-          PACKAGE_UUID=$(curl -X GET -H "Authorization: Bearer ${SCARF_TOKEN}" https://scarf.sh/api/v1/organizations/linuxserver-ci/packages | jq -r '.[] | select(.name=="linuxserver/code-server") | .uuid')
-          if [ -z "${PACKAGE_UUID}" ]; then
-            echo "Adding package to Scarf.sh"
-            curl -sX POST https://scarf.sh/api/v1/organizations/linuxserver-ci/packages \
-            -H "Authorization: Bearer ${SCARF_TOKEN}" \
-            -H "Content-Type: application/json" \
-            -d '{"name":"linuxserver/code-server",\
-            "shortDescription":"example description",\
-            "libraryType":"docker",\
-            "website":"https://github.com/linuxserver/docker-code-server",\
-            "backendUrl":"https://ghcr.io/linuxserver/code-server",\
-            "publicUrl":"https://lscr.io/linuxserver/code-server"}' || :
-          else
-            echo "Package already exists on Scarf.sh"
-          fi
-          '''
+        sh '''curl -H "Private-Token: ${GITLAB_TOKEN}" -X PUT "https://gitlab.com/api/v4/projects/Linuxserver.io%2F${LS_REPO}" \
+        -d "mirror=true&import_url=https://github.com/linuxserver/${LS_REPO}.git" '''
      }
    }
/* ############### /* ###############
@@ -491,41 +557,6 @@ pipeline {
          --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
        }
      }
-    stage('Build ARMHF') {
-      agent {
-        label 'ARMHF'
-      }
-      steps {
-        echo "Running on node: ${NODE_NAME}"
-        echo 'Logging into Github'
-        sh '''#! /bin/bash
-          echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
-        '''
-        sh "sed -r -i 's|(^FROM .*)|\\1\\n\\nENV LSIO_FIRST_PARTY=true|g' Dockerfile.armhf"
-        sh "docker buildx build \
-          --label \"org.opencontainers.image.created=${GITHUB_DATE}\" \
-          --label \"org.opencontainers.image.authors=linuxserver.io\" \
-          --label \"org.opencontainers.image.url=https://github.com/linuxserver/docker-code-server/packages\" \
-          --label \"org.opencontainers.image.documentation=https://docs.linuxserver.io/images/docker-code-server\" \
-          --label \"org.opencontainers.image.source=https://github.com/linuxserver/docker-code-server\" \
-          --label \"org.opencontainers.image.version=${EXT_RELEASE_CLEAN}-ls${LS_TAG_NUMBER}\" \
-          --label \"org.opencontainers.image.revision=${COMMIT_SHA}\" \
-          --label \"org.opencontainers.image.vendor=linuxserver.io\" \
-          --label \"org.opencontainers.image.licenses=GPL-3.0-only\" \
-          --label \"org.opencontainers.image.ref.name=${COMMIT_SHA}\" \
-          --label \"org.opencontainers.image.title=Code-server\" \
-          --label \"org.opencontainers.image.description=[Code-server](https://coder.com) is VS Code running on a remote server, accessible through the browser. - Code on your Chromebook, tablet, and laptop with a consistent dev environment. - If you have a Windows or Mac workstation, more easily develop for Linux. - Take advantage of large cloud servers to speed up tests, compilations, downloads, and more. - Preserve battery life when you're on the go. - All intensive computation runs on your server. - You're no longer running excess instances of Chrome.\" \
-          --no-cache --pull -f Dockerfile.armhf -t ${IMAGE}:arm32v7-${META_TAG} --platform=linux/arm/v7 \
-          --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
-        sh "docker tag ${IMAGE}:arm32v7-${META_TAG} ghcr.io/linuxserver/lsiodev-buildcache:arm32v7-${COMMIT_SHA}-${BUILD_NUMBER}"
-        retry(5) {
-          sh "docker push ghcr.io/linuxserver/lsiodev-buildcache:arm32v7-${COMMIT_SHA}-${BUILD_NUMBER}"
-        }
-        sh '''docker rmi \
-          ${IMAGE}:arm32v7-${META_TAG} \
-          ghcr.io/linuxserver/lsiodev-buildcache:arm32v7-${COMMIT_SHA}-${BUILD_NUMBER} || :'''
-      }
-    }
    stage('Build ARM64') {
      agent {
        label 'ARM64'
@@ -556,9 +587,12 @@ pipeline {
        retry(5) {
          sh "docker push ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}"
        }
-        sh '''docker rmi \
-          ${IMAGE}:arm64v8-${META_TAG} \
-          ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} || :'''
+        sh '''#! /bin/bash
+          containers=$(docker ps -aq)
+          if [[ -n "${containers}" ]]; then
+            docker stop ${containers}
+          fi
+          docker system prune -af --volumes || : '''
      }
    }
  }
@@ -595,7 +629,8 @@ pipeline {
          wait
          git add package_versions.txt
          git commit -m 'Bot Updating Package Versions'
-          git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git --all
+          git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
+          git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
          echo "true" > /tmp/packages-${COMMIT_SHA}-${BUILD_NUMBER}
          echo "Package tag updated, stopping build process"
        else
@@ -619,13 +654,6 @@ pipeline {
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
-        sh '''#! /bin/bash
-          echo "Packages were updated. Cleaning up the image and exiting."
-          if [ "${MULTIARCH}" == "true" ] && [ "${PACKAGE_CHECK}" == "false" ]; then
-            docker rmi ${IMAGE}:amd64-${META_TAG}
-          else
-            docker rmi ${IMAGE}:${META_TAG}
-          fi'''
        script{
          env.EXIT_STATUS = 'ABORTED'
        }
@@ -643,13 +671,6 @@ pipeline {
      }
      }
      steps {
-        sh '''#! /bin/bash
-          echo "There are no package updates. Cleaning up the image and exiting."
-          if [ "${MULTIARCH}" == "true" ] && [ "${PACKAGE_CHECK}" == "false" ]; then
-            docker rmi ${IMAGE}:amd64-${META_TAG}
-          else
-            docker rmi ${IMAGE}:${META_TAG}
-          fi'''
        script{
          env.EXIT_STATUS = 'ABORTED'
        }
@@ -671,14 +692,13 @@ pipeline {
]) {
script{
env.CI_URL = 'https://ci-tests.linuxserver.io/' + env.IMAGE + '/' + env.META_TAG + '/index.html'
+ env.CI_JSON_URL = 'https://ci-tests.linuxserver.io/' + env.IMAGE + '/' + env.META_TAG + '/report.json'
}
sh '''#! /bin/bash
set -e
docker pull ghcr.io/linuxserver/ci:latest
if [ "${MULTIARCH}" == "true" ]; then
- docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm32v7-${COMMIT_SHA}-${BUILD_NUMBER}
docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
- docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm32v7-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm32v7-${META_TAG}
docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
fi
docker run --rm \
@@ -697,8 +717,6 @@ pipeline {
-e WEB_SCREENSHOT=\"${CI_WEB}\" \
-e WEB_AUTH=\"${CI_AUTH}\" \
-e WEB_PATH=\"${CI_WEBPATH}\" \
- -e DO_REGION="ams3" \
- -e DO_BUCKET="lsio-ci" \
-t ghcr.io/linuxserver/ci:latest \
python3 test_build.py'''
}
@@ -715,12 +733,6 @@ pipeline {
}
steps {
withCredentials([
- [
- $class: 'UsernamePasswordMultiBinding',
- credentialsId: '3f9ba4d5-100d-45b0-a3c4-633fd6061207',
- usernameVariable: 'DOCKERUSER',
- passwordVariable: 'DOCKERPASS'
- ],
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
@@ -731,7 +743,7 @@ pipeline {
retry(5) {
sh '''#! /bin/bash
set -e
- echo $DOCKERPASS | docker login -u $DOCKERUSER --password-stdin
+ echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
@@ -751,17 +763,6 @@ pipeline {
done
'''
}
- sh '''#! /bin/bash
- for DELETEIMAGE in "${GITHUBIMAGE}" "${GITLABIMAGE}" "${QUAYIMAGE}" "${IMAGE}"; do
- docker rmi \
- ${DELETEIMAGE}:${META_TAG} \
- ${DELETEIMAGE}:${EXT_RELEASE_TAG} \
- ${DELETEIMAGE}:latest || :
- if [ -n "${SEMVER}" ]; then
- docker rmi ${DELETEIMAGE}:${SEMVER} || :
- fi
- done
- '''
}
}
}
@@ -773,12 +774,6 @@ pipeline {
}
steps {
withCredentials([
- [
- $class: 'UsernamePasswordMultiBinding',
- credentialsId: '3f9ba4d5-100d-45b0-a3c4-633fd6061207',
- usernameVariable: 'DOCKERUSER',
- passwordVariable: 'DOCKERPASS'
- ],
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
@@ -789,13 +784,11 @@ pipeline {
retry(5) {
sh '''#! /bin/bash
set -e
- echo $DOCKERPASS | docker login -u $DOCKERUSER --password-stdin
+ echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
if [ "${CI}" == "false" ]; then
- docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm32v7-${COMMIT_SHA}-${BUILD_NUMBER}
- docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm32v7-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm32v7-${META_TAG}
docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
fi
@@ -803,49 +796,47 @@ pipeline {
docker tag ${IMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG}
docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-latest
docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG}
- docker tag ${IMAGE}:arm32v7-${META_TAG} ${MANIFESTIMAGE}:arm32v7-${META_TAG}
- docker tag ${MANIFESTIMAGE}:arm32v7-${META_TAG} ${MANIFESTIMAGE}:arm32v7-latest
- docker tag ${MANIFESTIMAGE}:arm32v7-${META_TAG} ${MANIFESTIMAGE}:arm32v7-${EXT_RELEASE_TAG}
docker tag ${IMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-latest
docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
if [ -n "${SEMVER}" ]; then
docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${SEMVER}
- docker tag ${MANIFESTIMAGE}:arm32v7-${META_TAG} ${MANIFESTIMAGE}:arm32v7-${SEMVER}
docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
fi
docker push ${MANIFESTIMAGE}:amd64-${META_TAG}
docker push ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG}
docker push ${MANIFESTIMAGE}:amd64-latest
- docker push ${MANIFESTIMAGE}:arm32v7-${META_TAG}
- docker push ${MANIFESTIMAGE}:arm32v7-latest
- docker push ${MANIFESTIMAGE}:arm32v7-${EXT_RELEASE_TAG}
docker push ${MANIFESTIMAGE}:arm64v8-${META_TAG}
docker push ${MANIFESTIMAGE}:arm64v8-latest
docker push ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
if [ -n "${SEMVER}" ]; then
docker push ${MANIFESTIMAGE}:amd64-${SEMVER}
- docker push ${MANIFESTIMAGE}:arm32v7-${SEMVER}
docker push ${MANIFESTIMAGE}:arm64v8-${SEMVER}
fi
docker manifest push --purge ${MANIFESTIMAGE}:latest || :
- docker manifest create ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:amd64-latest ${MANIFESTIMAGE}:arm32v7-latest ${MANIFESTIMAGE}:arm64v8-latest
- docker manifest annotate ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:arm32v7-latest --os linux --arch arm
+ docker manifest create ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:amd64-latest ${MANIFESTIMAGE}:arm64v8-latest
docker manifest annotate ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:arm64v8-latest --os linux --arch arm64 --variant v8
docker manifest push --purge ${MANIFESTIMAGE}:${META_TAG} || :
- docker manifest create ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm32v7-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
- docker manifest annotate ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:arm32v7-${META_TAG} --os linux --arch arm
+ docker manifest create ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
docker manifest annotate ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG} --os linux --arch arm64 --variant v8
docker manifest push --purge ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} || :
- docker manifest create ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm32v7-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
- docker manifest annotate ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm32v7-${EXT_RELEASE_TAG} --os linux --arch arm
+ docker manifest create ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
docker manifest annotate ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} --os linux --arch arm64 --variant v8
if [ -n "${SEMVER}" ]; then
docker manifest push --purge ${MANIFESTIMAGE}:${SEMVER} || :
- docker manifest create ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm32v7-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
- docker manifest annotate ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:arm32v7-${SEMVER} --os linux --arch arm
+ docker manifest create ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
docker manifest annotate ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER} --os linux --arch arm64 --variant v8
fi
+ token=$(curl -sX GET "https://ghcr.io/token?scope=repository%3Alinuxserver%2F${CONTAINER_NAME}%3Apull" | jq -r '.token')
+ digest=$(curl -s \
+ --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
+ --header "Authorization: Bearer ${token}" \
+ "https://ghcr.io/v2/linuxserver/${CONTAINER_NAME}/manifests/arm32v7-latest")
+ if [[ $(echo "$digest" | jq -r '.layers') != "null" ]]; then
+ docker manifest push --purge ${MANIFESTIMAGE}:arm32v7-latest || :
+ docker manifest create ${MANIFESTIMAGE}:arm32v7-latest ${MANIFESTIMAGE}:amd64-latest
+ docker manifest push --purge ${MANIFESTIMAGE}:arm32v7-latest
+ fi
docker manifest push --purge ${MANIFESTIMAGE}:latest
docker manifest push --purge ${MANIFESTIMAGE}:${META_TAG}
docker manifest push --purge ${MANIFESTIMAGE}:${EXT_RELEASE_TAG}
@@ -855,29 +846,6 @@ pipeline {
done
'''
}
- sh '''#! /bin/bash
- for DELETEIMAGE in "${GITHUBIMAGE}" "${GITLABIMAGE}" "${QUAYIMAGE}" "${IMAGE}"; do
- docker rmi \
- ${DELETEIMAGE}:amd64-${META_TAG} \
- ${DELETEIMAGE}:amd64-latest \
- ${DELETEIMAGE}:amd64-${EXT_RELEASE_TAG} \
- ${DELETEIMAGE}:arm32v7-${META_TAG} \
- ${DELETEIMAGE}:arm32v7-latest \
- ${DELETEIMAGE}:arm32v7-${EXT_RELEASE_TAG} \
- ${DELETEIMAGE}:arm64v8-${META_TAG} \
- ${DELETEIMAGE}:arm64v8-latest \
- ${DELETEIMAGE}:arm64v8-${EXT_RELEASE_TAG} || :
- if [ -n "${SEMVER}" ]; then
- docker rmi \
- ${DELETEIMAGE}:amd64-${SEMVER} \
- ${DELETEIMAGE}:arm32v7-${SEMVER} \
- ${DELETEIMAGE}:arm64v8-${SEMVER} || :
- fi
- done
- docker rmi \
- ghcr.io/linuxserver/lsiodev-buildcache:arm32v7-${COMMIT_SHA}-${BUILD_NUMBER} \
- ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} || :
- '''
}
}
}
@@ -911,49 +879,117 @@ pipeline {
curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/releases -d @releasebody.json.done'''
}
}
- // Use helper container to sync the current README on master to the dockerhub endpoint
- stage('Sync-README') {
+ // Add protection to the release branch
+ stage('Github-Release-Branch-Protection') {
when {
+ branch "master"
environment name: 'CHANGE_ID', value: ''
environment name: 'EXIT_STATUS', value: ''
}
steps {
- withCredentials([
- [
- $class: 'UsernamePasswordMultiBinding',
- credentialsId: '3f9ba4d5-100d-45b0-a3c4-633fd6061207',
- usernameVariable: 'DOCKERUSER',
- passwordVariable: 'DOCKERPASS'
- ]
- ]) {
- sh '''#! /bin/bash
- set -e
- TEMPDIR=$(mktemp -d)
- docker pull ghcr.io/linuxserver/jenkins-builder:latest
- docker run --rm -e CONTAINER_NAME=${CONTAINER_NAME} -e GITHUB_BRANCH="${BRANCH_NAME}" -v ${TEMPDIR}:/ansible/jenkins ghcr.io/linuxserver/jenkins-builder:latest
- docker pull ghcr.io/linuxserver/readme-sync
- docker run --rm=true \
- -e DOCKERHUB_USERNAME=$DOCKERUSER \
- -e DOCKERHUB_PASSWORD=$DOCKERPASS \
- -e GIT_REPOSITORY=${LS_USER}/${LS_REPO} \
- -e DOCKER_REPOSITORY=${IMAGE} \
- -e GIT_BRANCH=master \
- -v ${TEMPDIR}/docker-${CONTAINER_NAME}:/mnt \
- ghcr.io/linuxserver/readme-sync bash -c 'node sync'
- rm -Rf ${TEMPDIR} '''
- }
+ echo "Setting up protection for release branch master"
+ sh '''#! /bin/bash
+ curl -H "Authorization: token ${GITHUB_TOKEN}" -X PUT https://api.github.com/repos/${LS_USER}/${LS_REPO}/branches/master/protection \
+ -d $(jq -c . << EOF
+ {
+ "required_status_checks": null,
+ "enforce_admins": false,
+ "required_pull_request_reviews": {
+ "dismiss_stale_reviews": false,
+ "require_code_owner_reviews": false,
+ "require_last_push_approval": false,
+ "required_approving_review_count": 1
+ },
+ "restrictions": null,
+ "required_linear_history": false,
+ "allow_force_pushes": false,
+ "allow_deletions": false,
+ "block_creations": false,
+ "required_conversation_resolution": true,
+ "lock_branch": false,
+ "allow_fork_syncing": false,
+ "required_signatures": false
+ }
+ EOF
+ ) '''
}
}
// If this is a Pull request send the CI link as a comment on it
stage('Pull Request Comment') {
when {
not {environment name: 'CHANGE_ID', value: ''}
- environment name: 'CI', value: 'true'
environment name: 'EXIT_STATUS', value: ''
}
steps {
- sh '''curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/issues/${PULL_REQUEST}/comments \
- -d '{"body": "I am a bot, here are the test results for this PR: \\n'${CI_URL}' \\n'${SHELLCHECK_URL}'"}' '''
+ sh '''#! /bin/bash
+ # Function to retrieve JSON data from URL
+ get_json() {
+ local url="$1"
+ local response=$(curl -s "$url")
+ if [ $? -ne 0 ]; then
+ echo "Failed to retrieve JSON data from $url"
+ return 1
+ fi
+ local json=$(echo "$response" | jq .)
+ if [ $? -ne 0 ]; then
+ echo "Failed to parse JSON data from $url"
+ return 1
+ fi
+ echo "$json"
+ }
+ build_table() {
+ local data="$1"
+ # Get the keys in the JSON data
+ local keys=$(echo "$data" | jq -r 'to_entries | map(.key) | .[]')
+ # Check if keys are empty
+ if [ -z "$keys" ]; then
+ echo "JSON report data does not contain any keys or the report does not exist."
+ return 1
+ fi
+ # Build table header
+ local header="| Tag | Passed |\\n| --- | --- |\\n"
+ # Loop through the JSON data to build the table rows
+ local rows=""
+ for build in $keys; do
+ local status=$(echo "$data" | jq -r ".[\\"$build\\"].test_success")
+ if [ "$status" = "true" ]; then
+ status="✅"
+ else
+ status="❌"
+ fi
+ local row="| "$build" | "$status" |\\n"
+ rows="${rows}${row}"
+ done
+ local table="${header}${rows}"
+ local escaped_table=$(echo "$table" | sed 's/\"/\\\\"/g')
+ echo "$escaped_table"
+ }
+ if [[ "${CI}" = "true" ]]; then
+ # Retrieve JSON data from URL
+ data=$(get_json "$CI_JSON_URL")
+ # Create table from JSON data
+ table=$(build_table "$data")
+ echo -e "$table"
+ curl -X POST -H "Authorization: token $GITHUB_TOKEN" \
+ -H "Accept: application/vnd.github.v3+json" \
+ "https://api.github.com/repos/$LS_USER/$LS_REPO/issues/$PULL_REQUEST/comments" \
+ -d "{\\"body\\": \\"I am a bot, here are the test results for this PR: \\n${CI_URL}\\n${SHELLCHECK_URL}\\n${table}\\"}"
+ else
+ curl -X POST -H "Authorization: token $GITHUB_TOKEN" \
+ -H "Accept: application/vnd.github.v3+json" \
+ "https://api.github.com/repos/$LS_USER/$LS_REPO/issues/$PULL_REQUEST/comments" \
+ -d "{\\"body\\": \\"I am a bot, here is the pushed image/manifest for this PR: \\n\\n\\`${GITHUBIMAGE}:${META_TAG}\\`\\"}"
+ fi
+ '''
}
}
}
@@ -979,6 +1015,14 @@ pipeline {
}
}
cleanup {
+ sh '''#! /bin/bash
+ echo "Performing docker system prune!!"
+ containers=$(docker ps -aq)
+ if [[ -n "${containers}" ]]; then
+ docker stop ${containers}
+ fi
+ docker system prune -af --volumes || :
+ '''
cleanWs()
}
}

README.md

@@ -1,6 +1,5 @@
<!-- DO NOT EDIT THIS FILE MANUALLY -->
- <!-- Please read the https://github.com/linuxserver/docker-code-server/blob/master/.github/CONTRIBUTING.md -->
+ <!-- Please read https://github.com/linuxserver/docker-code-server/blob/master/.github/CONTRIBUTING.md -->
[![linuxserver.io](https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/linuxserver_medium.png)](https://linuxserver.io)
[![Blog](https://img.shields.io/static/v1.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=linuxserver.io&message=Blog)](https://blog.linuxserver.io "all the things you can do with our containers including How-To guides, opinions and much more!")
@@ -52,7 +51,7 @@ Find us at:
## Supported Architectures
- We utilise the docker manifest for multi-platform awareness. More information is available from docker [here](https://github.com/docker/distribution/blob/master/docs/spec/manifest-v2-2.md#manifest-list) and our announcement [here](https://blog.linuxserver.io/2019/02/21/the-lsio-pipeline-project/).
+ We utilise the docker manifest for multi-platform awareness. More information is available from docker [here](https://distribution.github.io/distribution/spec/manifest-v2-2/#manifest-list) and our announcement [here](https://blog.linuxserver.io/2019/02/21/the-lsio-pipeline-project/).
Simply pulling `lscr.io/linuxserver/code-server:latest` should retrieve the correct image for your arch, but you can also pull specific arch images via tags.
@@ -62,7 +61,7 @@ The architectures supported by this image are:
| :----: | :----: | ---- |
| x86-64 | ✅ | amd64-\<version tag\> |
| arm64 | ✅ | arm64v8-\<version tag\> |
- | armhf | ✅ | arm32v7-\<version tag\> |
+ | armhf | ❌ | |
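As an illustrative aside on the tag scheme above (a minimal sketch, assuming the `arm64v8-latest` arch tag that the pipeline publishes alongside the versioned tags):

```bash
# Pull the multi-arch image; the manifest resolves the correct architecture
docker pull lscr.io/linuxserver/code-server:latest

# Or pin a single architecture explicitly via its arch tag
docker pull lscr.io/linuxserver/code-server:arm64v8-latest
```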
## Application Setup
@@ -81,13 +80,12 @@ How to create the [hashed password](https://github.com/cdr/code-server/blob/mast
## Usage
- Here are some example snippets to help you get started creating a container.
+ To help you get started creating a container from this image you can either use docker-compose or the docker cli.
### docker-compose (recommended, [click here for more info](https://docs.linuxserver.io/general/docker-compose))
```yaml
---
- version: "2.1"
services:
  code-server:
    image: lscr.io/linuxserver/code-server:latest
@@ -127,12 +125,11 @@ docker run -d \
-v /path/to/appdata/config:/config \
--restart unless-stopped \
lscr.io/linuxserver/code-server:latest
```
## Parameters
- Container images are configured using parameters passed at runtime (such as those above). These parameters are separated by a colon and indicate `<external>:<internal>` respectively. For example, `-p 8080:80` would expose port `80` from inside the container to be accessible from the host's IP on port `8080` outside the container.
+ Containers are configured using parameters passed at runtime (such as those above). These parameters are separated by a colon and indicate `<external>:<internal>` respectively. For example, `-p 8080:80` would expose port `80` from inside the container to be accessible from the host's IP on port `8080` outside the container.
| Parameter | Function |
| :----: | --- |
@@ -155,10 +152,10 @@ You can set any environment variable from a file by using a special prepend `FIL
As an example:
```bash
- -e FILE__PASSWORD=/run/secrets/mysecretpassword
+ -e FILE__MYVAR=/run/secrets/mysecretvariable
```
- Will set the environment variable `PASSWORD` based on the contents of the `/run/secrets/mysecretpassword` file.
+ Will set the environment variable `MYVAR` based on the contents of the `/run/secrets/mysecretvariable` file.
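A minimal sketch of combining the `FILE__` prefix with this image's optional `PASSWORD` variable; the host path and secret value below are examples only:

```bash
# Example only: store the web gui password in a file on the host
echo "supersecret" > /path/to/appdata/secrets/code-server-password

# FILE__PASSWORD points at the mounted file, so the container sets PASSWORD
# from the file contents instead of reading the value from the environment
docker run -d \
  --name=code-server \
  -e FILE__PASSWORD=/run/secrets/code-server-password \
  -v /path/to/appdata/secrets/code-server-password:/run/secrets/code-server-password:ro \
  -p 8443:8443 \
  lscr.io/linuxserver/code-server:latest
```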
## Umask for running applications
@@ -167,15 +164,20 @@ Keep in mind umask is not chmod it subtracts from permissions based on it's valu
## User / Group Identifiers
- When using volumes (`-v` flags) permissions issues can arise between the host OS and the container, we avoid this issue by allowing you to specify the user `PUID` and group `PGID`.
+ When using volumes (`-v` flags), permissions issues can arise between the host OS and the container, we avoid this issue by allowing you to specify the user `PUID` and group `PGID`.
Ensure any volume directories on the host are owned by the same user you specify and any permissions issues will vanish like magic.
- In this instance `PUID=1000` and `PGID=1000`, to find yours use `id user` as below:
+ In this instance `PUID=1000` and `PGID=1000`, to find yours use `id your_user` as below:
```bash
- $ id username
- uid=1000(dockeruser) gid=1000(dockergroup) groups=1000(dockergroup)
+ id your_user
+ ```
+ Example output:
+ ```text
+ uid=1000(your_user) gid=1000(your_user) groups=1000(your_user)
```
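To tie that `id` output back to the container settings, here is a short sketch of the matching run flags (the uid/gid values and host path are examples, following the parameters documented above):

```bash
# Match the container's internal user to the host owner of the config volume
docker run -d \
  --name=code-server \
  -e PUID=1000 \
  -e PGID=1000 \
  -v /path/to/appdata/config:/config \
  -p 8443:8443 \
  lscr.io/linuxserver/code-server:latest
```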
## Docker Mods
@@ -186,53 +188,100 @@ We publish various [Docker Mods](https://github.com/linuxserver/docker-mods) to
## Support Info
- * Shell access whilst the container is running: `docker exec -it code-server /bin/bash`
- * To monitor the logs of the container in realtime: `docker logs -f code-server`
- * container version number
- * `docker inspect -f '{{ index .Config.Labels "build_version" }}' code-server`
- * image version number
- * `docker inspect -f '{{ index .Config.Labels "build_version" }}' lscr.io/linuxserver/code-server:latest`
+ * Shell access whilst the container is running:
+ ```bash
+ docker exec -it code-server /bin/bash
+ ```
+ * To monitor the logs of the container in realtime:
+ ```bash
+ docker logs -f code-server
+ ```
+ * Container version number:
+ ```bash
+ docker inspect -f '{{ index .Config.Labels "build_version" }}' code-server
+ ```
+ * Image version number:
+ ```bash
+ docker inspect -f '{{ index .Config.Labels "build_version" }}' lscr.io/linuxserver/code-server:latest
+ ```
## Updating Info
- Most of our images are static, versioned, and require an image update and container recreation to update the app inside. With some exceptions (ie. nextcloud, plex), we do not recommend or support updating apps inside the container. Please consult the [Application Setup](#application-setup) section above to see if it is recommended for the image.
+ Most of our images are static, versioned, and require an image update and container recreation to update the app inside. With some exceptions (noted in the relevant readme.md), we do not recommend or support updating apps inside the container. Please consult the [Application Setup](#application-setup) section above to see if it is recommended for the image.
Below are the instructions for updating containers:
### Via Docker Compose
- * Update all images: `docker-compose pull`
- * or update a single image: `docker-compose pull code-server`
- * Let compose update all containers as necessary: `docker-compose up -d`
- * or update a single container: `docker-compose up -d code-server`
- * You can also remove the old dangling images: `docker image prune`
+ * Update images:
+ * All images:
+ ```bash
+ docker-compose pull
+ ```
+ * Single image:
+ ```bash
+ docker-compose pull code-server
+ ```
+ * Update containers:
+ * All containers:
+ ```bash
+ docker-compose up -d
+ ```
+ * Single container:
+ ```bash
+ docker-compose up -d code-server
+ ```
+ * You can also remove the old dangling images:
+ ```bash
+ docker image prune
+ ```
### Via Docker Run
- * Update the image: `docker pull lscr.io/linuxserver/code-server:latest`
- * Stop the running container: `docker stop code-server`
- * Delete the container: `docker rm code-server`
+ * Update the image:
+ ```bash
+ docker pull lscr.io/linuxserver/code-server:latest
+ ```
+ * Stop the running container:
+ ```bash
+ docker stop code-server
+ ```
+ * Delete the container:
+ ```bash
+ docker rm code-server
+ ```
* Recreate a new container with the same docker run parameters as instructed above (if mapped correctly to a host folder, your `/config` folder and settings will be preserved)
- * You can also remove the old dangling images: `docker image prune`
- ### Via Watchtower auto-updater (only use if you don't remember the original parameters)
- * Pull the latest image at its tag and replace it with the same env variables in one run:
- ```bash
- docker run --rm \
- -v /var/run/docker.sock:/var/run/docker.sock \
- containrrr/watchtower \
- --run-once code-server
- ```
- * You can also remove the old dangling images: `docker image prune`
- **Note:** We do not endorse the use of Watchtower as a solution to automated updates of existing Docker containers. In fact we generally discourage automated updates. However, this is a useful tool for one-time manual updates of containers where you have forgotten the original parameters. In the long term, we highly recommend using [Docker Compose](https://docs.linuxserver.io/general/docker-compose).
+ * You can also remove the old dangling images:
+ ```bash
+ docker image prune
+ ```
### Image Update Notifications - Diun (Docker Image Update Notifier)
- * We recommend [Diun](https://crazymax.dev/diun/) for update notifications. Other tools that automatically update containers unattended are not recommended or supported.
+ **tip**: We recommend [Diun](https://crazymax.dev/diun/) for update notifications. Other tools that automatically update containers unattended are not recommended or supported.
## Building locally
@@ -257,6 +306,8 @@ Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64
## Versions
+ * **21.04.23:** - Let server listen on both ipv4 and ipv6.
+ * **01.07.23:** - Deprecate armhf. As announced [here](https://www.linuxserver.io/blog/a-farewell-to-arm-hf)
* **05.10.22:** - Install recommended deps to maintain parity with the older images.
* **29.09.22:** - Rebase to jammy, switch to s6v3. Fix chown logic to skip `/config/workspace` contents.
* **20.02.22:** - Install using the official tarballs.

File diff suppressed because it is too large


@@ -4,58 +4,39 @@
project_name: code-server
project_url: "https://coder.com"
project_logo: "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/code-server-banner.png"
- project_blurb: "[{{ project_name|capitalize }}]({{ project_url }}) is VS Code running on a remote server, accessible through the browser.
- - Code on your Chromebook, tablet, and laptop with a consistent dev environment.
- - If you have a Windows or Mac workstation, more easily develop for Linux.
- - Take advantage of large cloud servers to speed up tests, compilations, downloads, and more.
- - Preserve battery life when you're on the go.
- - All intensive computation runs on your server.
- - You're no longer running excess instances of Chrome."
+ project_blurb: "[{{ project_name|capitalize }}]({{ project_url }}) is VS Code running on a remote server, accessible through the browser.\n- Code on your Chromebook, tablet, and laptop with a consistent dev environment.\n- If you have a Windows or Mac workstation, more easily develop for Linux.\n- Take advantage of large cloud servers to speed up tests, compilations, downloads, and more.\n- Preserve battery life when you're on the go.\n- All intensive computation runs on your server.\n- You're no longer running excess instances of Chrome."
project_lsio_github_repo_url: "https://github.com/linuxserver/docker-{{ project_name }}"
# supported architectures
available_architectures:
- - { arch: "{{ arch_x86_64 }}", tag: "amd64-latest"}
- - { arch: "{{ arch_arm64 }}", tag: "arm64v8-latest"}
- - { arch: "{{ arch_armhf }}", tag: "arm32v7-latest"}
+ - {arch: "{{ arch_x86_64 }}", tag: "amd64-latest"}
+ - {arch: "{{ arch_arm64 }}", tag: "arm64v8-latest"}
# development version
development_versions: false
development_versions_items:
- - { tag: "latest", desc: "Stable releases" }
+ - {tag: "latest", desc: "Stable releases"}
# container parameters
common_param_env_vars_enabled: true
param_container_name: "{{ project_name }}"
param_usage_include_vols: true
param_volumes:
- - { vol_path: "/config", vol_host_path: "/path/to/appdata/config", desc: "Contains all relevant configuration files." }
+ - {vol_path: "/config", vol_host_path: "/path/to/appdata/config", desc: "Contains all relevant configuration files."}
param_usage_include_ports: true
param_ports:
- - { external_port: "8443", internal_port: "8443", port_desc: "web gui" }
+ - {external_port: "8443", internal_port: "8443", port_desc: "web gui"}
param_usage_include_env: true
param_env_vars:
- - { env_var: "TZ", env_value: "Europe/London", desc: "Specify a timezone to use EG Europe/London" }
+ - {env_var: "TZ", env_value: "Europe/London", desc: "Specify a timezone to use EG Europe/London"}
# optional container parameters
opt_param_usage_include_env: true
opt_param_env_vars:
- - { env_var: "PASSWORD", env_value: "password", desc: "Optional web gui password, if `PASSWORD` or `HASHED_PASSWORD` is not provided, there will be no auth." }
- - { env_var: "HASHED_PASSWORD", env_value: "", desc: "Optional web gui password, overrides `PASSWORD`, instructions on how to create it is below." }
- - { env_var: "SUDO_PASSWORD", env_value: "password", desc: "If this optional variable is set, user will have sudo access in the code-server terminal with the specified password." }
- - { env_var: "SUDO_PASSWORD_HASH", env_value: "", desc: "Optionally set sudo password via hash (takes priority over `SUDO_PASSWORD` var). Format is `$type$salt$hashed`." }
- - { env_var: "PROXY_DOMAIN", env_value: "code-server.my.domain", desc: "If this optional variable is set, this domain will be proxied for subdomain proxying. See [Documentation](https://github.com/cdr/code-server/blob/master/docs/FAQ.md#sub-domains)" }
- - { env_var: "DEFAULT_WORKSPACE", env_value: "/config/workspace", desc: "If this optional variable is set, code-server will open this directory by default" }
+ - {env_var: "PASSWORD", env_value: "password", desc: "Optional web gui password, if `PASSWORD` or `HASHED_PASSWORD` is not provided, there will be no auth."}
+ - {env_var: "HASHED_PASSWORD", env_value: "", desc: "Optional web gui password, overrides `PASSWORD`, instructions on how to create it is below."}
+ - {env_var: "SUDO_PASSWORD", env_value: "password", desc: "If this optional variable is set, user will have sudo access in the code-server terminal with the specified password."}
+ - {env_var: "SUDO_PASSWORD_HASH", env_value: "", desc: "Optionally set sudo password via hash (takes priority over `SUDO_PASSWORD` var). Format is `$type$salt$hashed`."}
+ - {env_var: "PROXY_DOMAIN", env_value: "code-server.my.domain", desc: "If this optional variable is set, this domain will be proxied for subdomain proxying. See [Documentation](https://github.com/cdr/code-server/blob/master/docs/FAQ.md#sub-domains)"}
+ - {env_var: "DEFAULT_WORKSPACE", env_value: "/config/workspace", desc: "If this optional variable is set, code-server will open this directory by default"}
optional_block_1: false
optional_block_1_items: ""
# application setup block
app_setup_block_enabled: true
app_setup_block: |
@@ -71,30 +52,30 @@ app_setup_block: |
### Hashed code-server password
How to create the [hashed password](https://github.com/cdr/code-server/blob/master/docs/FAQ.md#can-i-store-my-password-hashed).
# changelog
changelogs:
- - { date: "05.10.22:", desc: "Install recommended deps to maintain parity with the older images." }
- - { date: "29.09.22:", desc: "Rebase to jammy, switch to s6v3. Fix chown logic to skip `/config/workspace` contents." }
- - { date: "20.02.22:", desc: "Install using the official tarballs." }
- - { date: "29.12.21:", desc: "Add `install-extension` as a helper for mods to install extensions." }
- - { date: "06.12.21:", desc: "Add `DEFAULT_WORKSPACE` env var." }
- - { date: "29.11.21:", desc: "Rebase to Ubuntu focal." }
- - { date: "16.09.21:", desc: "Fix slow `chown` on large workspace (contents of workspace folder no longer chowned)." }
- - { date: "11.07.21:", desc: "Bump node to 14 to fix builds" }
- - { date: "08.05.21:", desc: "Fix doc link" }
- - { date: "04.02.20:", desc: "Allow setting gui password via hash using env var `HASHED_PASSWORD`." }
- - { date: "23.12.20:", desc: "Allow setting sudo password via hash using env var `SUDO_PASSWORD_HASH`." }
- - { date: "29.05.20:", desc: "Add --domain-proxy support." }
- - { date: "21.05.20:", desc: "Shrink images, install via yarn, fix arm32v7 build." }
- - { date: "18.05.20:", desc: "Switch to multi-arch images, install via npm." }
- - { date: "29.04.20:", desc: "Update start arguments." }
- - { date: "01.04.20:", desc: "Structural changes required for v3." }
- - { date: "17.01.20:", desc: "Fix artifact url retrieval from github." }
- - { date: "24.10.19:", desc: "Upgrade to v2 builds." }
- - { date: "28.09.19:", desc: "Update project logo." }
- - { date: "21.09.19:", desc: "Add development builds/tag." }
- - { date: "09.07.19:", desc: "Add optional sudo access." }
- - { date: "01.07.19:", desc: "Add nano." }
- - { date: "24.06.19:", desc: "Initial Release." }
+ - {date: "21.04.23:", desc: "Let server listen on both ipv4 and ipv6."}
+ - {date: "01.07.23:", desc: "Deprecate armhf. As announced [here](https://www.linuxserver.io/blog/a-farewell-to-arm-hf)"}
+ - {date: "05.10.22:", desc: "Install recommended deps to maintain parity with the older images."}
+ - {date: "29.09.22:", desc: "Rebase to jammy, switch to s6v3. Fix chown logic to skip `/config/workspace` contents."}
+ - {date: "20.02.22:", desc: "Install using the official tarballs."}
+ - {date: "29.12.21:", desc: "Add `install-extension` as a helper for mods to install extensions."}
+ - {date: "06.12.21:", desc: "Add `DEFAULT_WORKSPACE` env var."}
+ - {date: "29.11.21:", desc: "Rebase to Ubuntu focal."}
+ - {date: "16.09.21:", desc: "Fix slow `chown` on large workspace (contents of workspace folder no longer chowned)."}
+ - {date: "11.07.21:", desc: "Bump node to 14 to fix builds"}
+ - {date: "08.05.21:", desc: "Fix doc link"}
+ - {date: "04.02.20:", desc: "Allow setting gui password via hash using env var `HASHED_PASSWORD`."}
+ - {date: "23.12.20:", desc: "Allow setting sudo password via hash using env var `SUDO_PASSWORD_HASH`."}
+ - {date: "29.05.20:", desc: "Add --domain-proxy support."}
+ - {date: "21.05.20:", desc: "Shrink images, install via yarn, fix arm32v7 build."}
+ - {date: "18.05.20:", desc: "Switch to multi-arch images, install via npm."}
+ - {date: "29.04.20:", desc: "Update start arguments."}
+ - {date: "01.04.20:", desc: "Structural changes required for v3."}
+ - {date: "17.01.20:", desc: "Fix artifact url retrieval from github."}
+ - {date: "24.10.19:", desc: "Upgrade to v2 builds."}
+ - {date: "28.09.19:", desc: "Update project logo."}
+ - {date: "21.09.19:", desc: "Add development builds/tag."}
+ - {date: "09.07.19:", desc: "Add optional sudo access."}
+ - {date: "01.07.19:", desc: "Add nano."}
+ - {date: "24.06.19:", desc: "Initial Release."}


@@ -17,7 +17,7 @@ exec \
s6-notifyoncheck -d -n 300 -w 1000 -c "nc -z 127.0.0.1 8443" \
s6-setuidgid abc \
/app/code-server/bin/code-server \
- --bind-addr 0.0.0.0:8443 \
+ --bind-addr "[::]:8443" \
--user-data-dir /config/data \
--extensions-dir /config/extensions \
--disable-telemetry \