pipeline {
  agent {
    label 'X86-64-MULTI'
  }
  options {
    buildDiscarder(logRotator(numToKeepStr: '10', daysToKeepStr: '60'))
    parallelsAlwaysFailFast()
  }
  // Input to determine if this is a package check
  parameters {
    string(defaultValue: 'false', description: 'package check run', name: 'PACKAGE_CHECK')
  }
  // Configuration for the variables used for this specific repo
  environment {
    BUILDS_DISCORD=credentials('build_webhook_url')
    GITHUB_TOKEN=credentials('498b4638-2d02-4ce5-832d-8a57d01d97ab')
    GITLAB_TOKEN=credentials('b6f0f1dd-6952-4cf6-95d1-9c06380283f0')
    GITLAB_NAMESPACE=credentials('gitlab-namespace-id')
    DOCKERHUB_TOKEN=credentials('docker-hub-ci-pat')
    QUAYIO_API_TOKEN=credentials('quayio-repo-api-token')
    GIT_SIGNING_KEY=credentials('484fbca6-9a4f-455e-b9e3-97ac98785f5f')
    BUILD_VERSION_ARG = 'READARR_RELEASE'
    LS_USER = 'linuxserver'
    LS_REPO = 'docker-readarr'
    CONTAINER_NAME = 'readarr'
    DOCKERHUB_IMAGE = 'linuxserver/readarr'
    DEV_DOCKERHUB_IMAGE = 'lsiodev/readarr'
    PR_DOCKERHUB_IMAGE = 'lspipepr/readarr'
    DIST_IMAGE = 'alpine'
    MULTIARCH='true'
    CI='true'
    CI_WEB='true'
    CI_PORT='8787'
    CI_SSL='false'
    CI_DELAY='240'
    CI_DOCKERENV=''
    CI_AUTH=''
    CI_WEBPATH='/system/status'
  }
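  // The CI_* values above are handed to the linuxserver CI test container in the Test stage below (PORT, SSL, DOCKER_ENV, WEB_SCREENSHOT, WEB_AUTH, WEB_PATH, DOCKER_LOGS_TIMEOUT)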
  stages {
    stage("Set git config"){
      steps{
        sh '''#!/bin/bash
          cat ${GIT_SIGNING_KEY} > /config/.ssh/id_sign
          chmod 600 /config/.ssh/id_sign
          ssh-keygen -y -f /config/.ssh/id_sign > /config/.ssh/id_sign.pub
          echo "Using $(ssh-keygen -lf /config/.ssh/id_sign) to sign commits"
          git config --global gpg.format ssh
          git config --global user.signingkey /config/.ssh/id_sign
          git config --global commit.gpgsign true
        '''
      }
    }
    // Setup all the basic environment variables needed for the build
    stage("Set ENV Variables base"){
      steps{
        echo "Running on node: ${NODE_NAME}"
        sh '''#! /bin/bash
          echo "Pruning builder"
          docker builder prune -f --builder container || :
          containers=$(docker ps -q)
          if [[ -n "${containers}" ]]; then
            BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit')
            for container in ${containers}; do
              if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then
                echo "skipping buildx container in docker stop"
              else
                echo "Stopping container ${container}"
                docker stop ${container}
              fi
            done
          fi
          docker system prune -f --volumes || :
          docker image prune -af || :
        '''
        script{
          env.EXIT_STATUS = ''
          env.LS_RELEASE = sh(
            script: '''docker run --rm quay.io/skopeo/stable:v1 inspect docker://ghcr.io/${LS_USER}/${CONTAINER_NAME}:develop 2>/dev/null | jq -r '.Labels.build_version' | awk '{print $3}' | grep '\\-ls' || : ''',
            returnStdout: true).trim()
          env.LS_RELEASE_NOTES = sh(
            script: '''cat readme-vars.yml | awk -F \\" '/date: "[0-9][0-9].[0-9][0-9].[0-9][0-9]:/ {print $4;exit;}' | sed -E ':a;N;$!ba;s/\\r{0,1}\\n/\\\\n/g' ''',
            returnStdout: true).trim()
          env.GITHUB_DATE = sh(
            script: '''date '+%Y-%m-%dT%H:%M:%S%:z' ''',
            returnStdout: true).trim()
          env.COMMIT_SHA = sh(
            script: '''git rev-parse HEAD''',
            returnStdout: true).trim()
          env.GH_DEFAULT_BRANCH = sh(
            script: '''git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||' ''',
            returnStdout: true).trim()
          env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/commit/' + env.GIT_COMMIT
          env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DOCKERHUB_IMAGE + '/tags/'
          env.PULL_REQUEST = env.CHANGE_ID
          env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/call_issue_pr_tracker.yml ./.github/workflows/call_issues_cron.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml ./root/donate.txt'
        }
        sh '''#! /bin/bash
          echo "The default github branch detected as ${GH_DEFAULT_BRANCH}" '''
        script{
          env.LS_RELEASE_NUMBER = sh(
            script: '''echo ${LS_RELEASE} |sed 's/^.*-ls//g' ''',
            returnStdout: true).trim()
        }
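        // Work out the next -ls tag number: if the existing develop-${LS_RELEASE} tag already points at this commit, reuse its number, otherwise increment it by one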
        script{
          env.LS_TAG_NUMBER = sh(
            script: '''#! /bin/bash
              tagsha=$(git rev-list -n 1 develop-${LS_RELEASE} 2>/dev/null)
              if [ "${tagsha}" == "${COMMIT_SHA}" ]; then
                echo ${LS_RELEASE_NUMBER}
              elif [ -z "${GIT_COMMIT}" ]; then
                echo ${LS_RELEASE_NUMBER}
              else
                echo $((${LS_RELEASE_NUMBER} + 1))
              fi''',
            returnStdout: true).trim()
        }
      }
    }
    /* #######################
       Package Version Tagging
       ####################### */
    // Grab the current package versions in Git to determine the package tag
    stage("Set Package tag"){
      steps{
        script{
          env.PACKAGE_TAG = sh(
            script: '''#!/bin/bash
              if [ -e package_versions.txt ] ; then
                cat package_versions.txt | md5sum | cut -c1-8
              else
                echo none
              fi''',
            returnStdout: true).trim()
        }
      }
    }
    /* ########################
       External Release Tagging
       ######################## */
    // If a custom command is used to determine the version, run that command
    stage("Set tag custom bash"){
      steps{
        script{
          env.EXT_RELEASE = sh(
            script: ''' curl -sL https://readarr.servarr.com/v1/update/develop/changes?runtime=netcore%26os=linuxmusl | jq -r '.[0].version' ''',
            returnStdout: true).trim()
          env.RELEASE_LINK = 'custom_command'
        }
      }
    }
    // Sanitize the release tag and strip illegal docker or github characters
    stage("Sanitize tag"){
      steps{
        script{
          env.EXT_RELEASE_CLEAN = sh(
            script: '''echo ${EXT_RELEASE} | sed 's/[~,%@+;:/ ]//g' ''',
            returnStdout: true).trim()
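          // Derive SEMVER from the sanitized release: a full x.y.z match is used as-is, while a bare x.y falls back to today's date as the patch component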
          def semver = env.EXT_RELEASE_CLEAN =~ /(\d+)\.(\d+)\.(\d+)/
          if (semver.find()) {
            env.SEMVER = "${semver[0][1]}.${semver[0][2]}.${semver[0][3]}"
          } else {
            semver = env.EXT_RELEASE_CLEAN =~ /(\d+)\.(\d+)(?:\.(\d+))?(.*)/
            if (semver.find()) {
              if (semver[0][3]) {
                env.SEMVER = "${semver[0][1]}.${semver[0][2]}.${semver[0][3]}"
              } else if (!semver[0][3] && !semver[0][4]) {
                env.SEMVER = "${semver[0][1]}.${semver[0][2]}.${(new Date()).format('yyyyMMdd')}"
              }
            }
          }

          if (env.SEMVER != null) {
            if (BRANCH_NAME != "${env.GH_DEFAULT_BRANCH}") {
              env.SEMVER = "${env.SEMVER}-${BRANCH_NAME}"
            }
            println("SEMVER: ${env.SEMVER}")
          } else {
            println("No SEMVER detected")
          }
        }
      }
    }
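    // The next three stages pick the registry endpoints and tag names for develop, dev and PR builds; CI_TAGS is a pipe-separated list that the Test stage later passes to the CI container as TAGS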
    // If this is a develop build use live docker endpoints
    stage("Set ENV live build"){
      when {
        branch "develop"
        environment name: 'CHANGE_ID', value: ''
      }
      steps {
        script{
          env.IMAGE = env.DOCKERHUB_IMAGE
          env.GITHUBIMAGE = 'ghcr.io/' + env.LS_USER + '/' + env.CONTAINER_NAME
          env.GITLABIMAGE = 'registry.gitlab.com/linuxserver.io/' + env.LS_REPO + '/' + env.CONTAINER_NAME
          env.QUAYIMAGE = 'quay.io/linuxserver.io/' + env.CONTAINER_NAME
          if (env.MULTIARCH == 'true') {
            env.CI_TAGS = 'amd64-develop-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER + '|arm64v8-develop-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
          } else {
            env.CI_TAGS = 'develop-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
          }
          env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
          env.META_TAG = 'develop-' + env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
          env.EXT_RELEASE_TAG = 'develop-version-' + env.EXT_RELEASE_CLEAN
          env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
        }
      }
    }
    // If this is a dev build use dev docker endpoints
    stage("Set ENV dev build"){
      when {
        not {branch "develop"}
        environment name: 'CHANGE_ID', value: ''
      }
      steps {
        script{
          env.IMAGE = env.DEV_DOCKERHUB_IMAGE
          env.GITHUBIMAGE = 'ghcr.io/' + env.LS_USER + '/lsiodev-' + env.CONTAINER_NAME
          env.GITLABIMAGE = 'registry.gitlab.com/linuxserver.io/' + env.LS_REPO + '/lsiodev-' + env.CONTAINER_NAME
          env.QUAYIMAGE = 'quay.io/linuxserver.io/lsiodev-' + env.CONTAINER_NAME
          if (env.MULTIARCH == 'true') {
            env.CI_TAGS = 'amd64-develop-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '|arm64v8-develop-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
          } else {
            env.CI_TAGS = 'develop-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
          }
          env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
          env.META_TAG = 'develop-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
          env.EXT_RELEASE_TAG = 'develop-version-' + env.EXT_RELEASE_CLEAN
          env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DEV_DOCKERHUB_IMAGE + '/tags/'
          env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
        }
      }
    }
    // If this is a pull request build use dev docker endpoints
    stage("Set ENV PR build"){
      when {
        not {environment name: 'CHANGE_ID', value: ''}
      }
      steps {
        script{
          env.IMAGE = env.PR_DOCKERHUB_IMAGE
          env.GITHUBIMAGE = 'ghcr.io/' + env.LS_USER + '/lspipepr-' + env.CONTAINER_NAME
          env.GITLABIMAGE = 'registry.gitlab.com/linuxserver.io/' + env.LS_REPO + '/lspipepr-' + env.CONTAINER_NAME
          env.QUAYIMAGE = 'quay.io/linuxserver.io/lspipepr-' + env.CONTAINER_NAME
          if (env.MULTIARCH == 'true') {
            env.CI_TAGS = 'amd64-develop-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST + '|arm64v8-develop-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
          } else {
            env.CI_TAGS = 'develop-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
          }
          env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
          env.META_TAG = 'develop-' + env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA + '-pr-' + env.PULL_REQUEST
          env.EXT_RELEASE_TAG = 'develop-version-' + env.EXT_RELEASE_CLEAN
          env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/pull/' + env.PULL_REQUEST
          env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.PR_DOCKERHUB_IMAGE + '/tags/'
          env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
        }
      }
    }
    // Run ShellCheck
    stage('ShellCheck') {
      when {
        environment name: 'CI', value: 'true'
      }
      steps {
        withCredentials([
          string(credentialsId: 'ci-tests-s3-key-id', variable: 'S3_KEY'),
          string(credentialsId: 'ci-tests-s3-secret-access-key', variable: 'S3_SECRET')
        ]) {
          script{
            env.SHELLCHECK_URL = 'https://ci-tests.linuxserver.io/' + env.IMAGE + '/' + env.META_TAG + '/shellcheck-result.xml'
          }
          sh '''curl -sL https://raw.githubusercontent.com/linuxserver/docker-jenkins-builder/master/checkrun.sh | /bin/bash'''
          sh '''#! /bin/bash
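            # Upload the ShellCheck result from the workspace to the public ci-tests bucket so the SHELLCHECK_URL set above resolves; failures are ignored (|| :)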
            docker run --rm \
              -v ${WORKSPACE}:/mnt \
              -e AWS_ACCESS_KEY_ID=\"${S3_KEY}\" \
              -e AWS_SECRET_ACCESS_KEY=\"${S3_SECRET}\" \
              ghcr.io/linuxserver/baseimage-alpine:3.20 s6-envdir -fn -- /var/run/s6/container_environment /bin/bash -c "\
                apk add --no-cache python3 && \
                python3 -m venv /lsiopy && \
                pip install --no-cache-dir -U pip && \
                pip install --no-cache-dir s3cmd && \
                s3cmd put --no-preserve --acl-public -m text/xml /mnt/shellcheck-result.xml s3://ci-tests.linuxserver.io/${IMAGE}/${META_TAG}/shellcheck-result.xml" || :'''
        }
      }
    }
    // Use helper containers to render templated files
    stage('Update-Templates') {
      when {
        branch "develop"
        environment name: 'CHANGE_ID', value: ''
        expression {
          env.CONTAINER_NAME != null
        }
      }
      steps {
        sh '''#! /bin/bash
          set -e
          TEMPDIR=$(mktemp -d)
          docker pull ghcr.io/linuxserver/jenkins-builder:latest
          # Cloned repo paths for templating:
          # ${TEMPDIR}/docker-${CONTAINER_NAME}: Cloned branch develop of ${LS_USER}/${LS_REPO} for running the jenkins builder on
          # ${TEMPDIR}/repo/${LS_REPO}: Cloned branch develop of ${LS_USER}/${LS_REPO} for committing various templated file changes and pushing back to Github
          # ${TEMPDIR}/docs/docker-documentation: Cloned docs repo for pushing docs updates to Github
          # ${TEMPDIR}/unraid/docker-templates: Cloned docker-templates repo to check for logos
          # ${TEMPDIR}/unraid/templates: Cloned templates repo for committing unraid template changes and pushing back to Github
          git clone --branch develop --depth 1 https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/docker-${CONTAINER_NAME}
          docker run --rm -v ${TEMPDIR}/docker-${CONTAINER_NAME}:/tmp -e LOCAL=true -e PUID=$(id -u) -e PGID=$(id -g) ghcr.io/linuxserver/jenkins-builder:latest
          echo "Starting Stage 1 - Jenkinsfile update"
          if [[ "$(md5sum Jenkinsfile | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/Jenkinsfile | awk '{ print $1 }')" ]]; then
            mkdir -p ${TEMPDIR}/repo
            git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
            cd ${TEMPDIR}/repo/${LS_REPO}
            git checkout -f develop
            cp ${TEMPDIR}/docker-${CONTAINER_NAME}/Jenkinsfile ${TEMPDIR}/repo/${LS_REPO}/
            git add Jenkinsfile
            git commit -m 'Bot Updating Templated Files'
            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git develop
            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git develop
            echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
            echo "Updating Jenkinsfile and exiting build, new one will trigger based on commit"
            rm -Rf ${TEMPDIR}
            exit 0
          else
            echo "Jenkinsfile is up to date."
          fi
          echo "Starting Stage 2 - Delete old templates"
          OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md .github/ISSUE_TEMPLATE/issue.bug.md .github/ISSUE_TEMPLATE/issue.feature.md .github/workflows/call_invalid_helper.yml .github/workflows/stale.yml .github/workflows/package_trigger.yml"
          for i in ${OLD_TEMPLATES}; do
            if [[ -f "${i}" ]]; then
              TEMPLATES_TO_DELETE="${i} ${TEMPLATES_TO_DELETE}"
            fi
          done
          if [[ -n "${TEMPLATES_TO_DELETE}" ]]; then
            mkdir -p ${TEMPDIR}/repo
            git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
            cd ${TEMPDIR}/repo/${LS_REPO}
            git checkout -f develop
            for i in ${TEMPLATES_TO_DELETE}; do
              git rm "${i}"
            done
            git commit -m 'Bot Updating Templated Files'
            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git develop
            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git develop
            echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
            echo "Deleting old/deprecated templates and exiting build, new one will trigger based on commit"
            rm -Rf ${TEMPDIR}
            exit 0
          else
            echo "No templates to delete"
          fi
          echo "Starting Stage 2.5 - Update init diagram"
          if ! grep -q 'init_diagram:' readme-vars.yml; then
            echo "Adding the key 'init_diagram' to readme-vars.yml"
            sed -i '\\|^#.*changelog.*$|d' readme-vars.yml
            sed -i 's|^changelogs:|# init diagram\\ninit_diagram:\\n\\n# changelog\\nchangelogs:|' readme-vars.yml
          fi
          mkdir -p ${TEMPDIR}/d2
          docker run --rm -v ${TEMPDIR}/d2:/output -e PUID=$(id -u) -e PGID=$(id -g) -e RAW="true" ghcr.io/linuxserver/d2-builder:latest ${CONTAINER_NAME}:develop
          ls -al ${TEMPDIR}/d2
          yq -ei ".init_diagram |= load_str(\\"${TEMPDIR}/d2/${CONTAINER_NAME}-develop.d2\\")" readme-vars.yml
          if [[ $(md5sum readme-vars.yml | cut -c1-8) != $(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/readme-vars.yml | cut -c1-8) ]]; then
            echo "'init_diagram' has been updated. Updating repo and exiting build, new one will trigger based on commit."
            mkdir -p ${TEMPDIR}/repo
            git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
            cd ${TEMPDIR}/repo/${LS_REPO}
            git checkout -f develop
            cp ${WORKSPACE}/readme-vars.yml ${TEMPDIR}/repo/${LS_REPO}/readme-vars.yml
            git add readme-vars.yml
            git commit -m 'Bot Updating Templated Files'
            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git develop
            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git develop
            echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
            echo "Updating templates and exiting build, new one will trigger based on commit"
            rm -Rf ${TEMPDIR}
            exit 0
          else
            echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
            echo "Init diagram is unchanged"
          fi
          echo "Starting Stage 3 - Update templates"
          CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
          cd ${TEMPDIR}/docker-${CONTAINER_NAME}
          NEWHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
          if [[ "${CURRENTHASH}" != "${NEWHASH}" ]] || ! grep -q '.jenkins-external' "${WORKSPACE}/.gitignore" 2>/dev/null; then
            mkdir -p ${TEMPDIR}/repo
            git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
            cd ${TEMPDIR}/repo/${LS_REPO}
            git checkout -f develop
            cd ${TEMPDIR}/docker-${CONTAINER_NAME}
            mkdir -p ${TEMPDIR}/repo/${LS_REPO}/.github/workflows
            mkdir -p ${TEMPDIR}/repo/${LS_REPO}/.github/ISSUE_TEMPLATE
            cp --parents ${TEMPLATED_FILES} ${TEMPDIR}/repo/${LS_REPO}/ || :
            cp --parents readme-vars.yml ${TEMPDIR}/repo/${LS_REPO}/ || :
            cd ${TEMPDIR}/repo/${LS_REPO}/
            if ! grep -q '.jenkins-external' .gitignore 2>/dev/null; then
              echo ".jenkins-external" >> .gitignore
              git add .gitignore
            fi
            git add readme-vars.yml ${TEMPLATED_FILES}
            git commit -m 'Bot Updating Templated Files'
            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git develop
            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git develop
            echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
            echo "Updating templates and exiting build, new one will trigger based on commit"
            rm -Rf ${TEMPDIR}
            exit 0
          else
            echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
            echo "No templates to update"
          fi
          echo "Starting Stage 4 - External repo updates: Docs, Unraid Template and Readme Sync to Docker Hub"
          mkdir -p ${TEMPDIR}/docs
          git clone --depth=1 https://github.com/linuxserver/docker-documentation.git ${TEMPDIR}/docs/docker-documentation
          if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]] && [[ (! -f ${TEMPDIR}/docs/docker-documentation/docs/images/docker-${CONTAINER_NAME}.md) || ("$(md5sum ${TEMPDIR}/docs/docker-documentation/docs/images/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md | awk '{ print $1 }')") ]]; then
            cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/docker-${CONTAINER_NAME}.md ${TEMPDIR}/docs/docker-documentation/docs/images/
            cd ${TEMPDIR}/docs/docker-documentation
            GH_DOCS_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||')
            git add docs/images/docker-${CONTAINER_NAME}.md
            echo "Updating docs repo"
            git commit -m 'Bot Updating Documentation'
            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} --rebase
            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} || \
              (MAXWAIT="10" && echo "Push to docs failed, trying again in ${MAXWAIT} seconds" && \
              sleep $((RANDOM % MAXWAIT)) && \
              git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH} --rebase && \
              git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/docker-documentation.git ${GH_DOCS_DEFAULT_BRANCH})
          else
            echo "Docs update not needed, skipping"
          fi
          mkdir -p ${TEMPDIR}/unraid
          git clone --depth=1 https://github.com/linuxserver/docker-templates.git ${TEMPDIR}/unraid/docker-templates
          git clone --depth=1 https://github.com/linuxserver/templates.git ${TEMPDIR}/unraid/templates
          if [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-logo.png ]]; then
            sed -i "s|master/linuxserver.io/img/linuxserver-ls-logo.png|master/linuxserver.io/img/${CONTAINER_NAME}-logo.png|" ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml
          elif [[ -f ${TEMPDIR}/unraid/docker-templates/linuxserver.io/img/${CONTAINER_NAME}-icon.png ]]; then
            sed -i "s|master/linuxserver.io/img/linuxserver-ls-logo.png|master/linuxserver.io/img/${CONTAINER_NAME}-icon.png|" ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml
          fi
          if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]] && [[ (! -f ${TEMPDIR}/unraid/templates/unraid/${CONTAINER_NAME}.xml) || ("$(md5sum ${TEMPDIR}/unraid/templates/unraid/${CONTAINER_NAME}.xml | awk '{ print $1 }')" != "$(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml | awk '{ print $1 }')") ]]; then
            echo "Updating Unraid template"
            cd ${TEMPDIR}/unraid/templates/
            GH_TEMPLATES_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||')
            if grep -wq "^${CONTAINER_NAME}$" ${TEMPDIR}/unraid/templates/unraid/ignore.list && [[ -f ${TEMPDIR}/unraid/templates/unraid/deprecated/${CONTAINER_NAME}.xml ]]; then
              echo "Image is on the ignore list, and already in the deprecation folder."
            elif grep -wq "^${CONTAINER_NAME}$" ${TEMPDIR}/unraid/templates/unraid/ignore.list; then
              echo "Image is on the ignore list, marking Unraid template as deprecated"
              cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml ${TEMPDIR}/unraid/templates/unraid/
              git add -u unraid/${CONTAINER_NAME}.xml
              git mv unraid/${CONTAINER_NAME}.xml unraid/deprecated/${CONTAINER_NAME}.xml || :
              git commit -m 'Bot Moving Deprecated Unraid Template' || :
            else
              cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml ${TEMPDIR}/unraid/templates/unraid/
              git add unraid/${CONTAINER_NAME}.xml
              git commit -m 'Bot Updating Unraid Template'
            fi
            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH} --rebase
            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH} || \
              (MAXWAIT="10" && echo "Push to unraid templates failed, trying again in ${MAXWAIT} seconds" && \
              sleep $((RANDOM % MAXWAIT)) && \
              git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH} --rebase && \
              git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/linuxserver/templates.git ${GH_TEMPLATES_DEFAULT_BRANCH})
          else
            echo "No updates to Unraid template needed, skipping"
          fi
          if [[ "${BRANCH_NAME}" == "${GH_DEFAULT_BRANCH}" ]]; then
            if [[ $(cat ${TEMPDIR}/docker-${CONTAINER_NAME}/README.md | wc -m) -gt 25000 ]]; then
              echo "Readme is longer than 25,000 characters. Syncing the lite version to Docker Hub"
              DH_README_SYNC_PATH="${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/README.lite"
            else
              echo "Syncing readme to Docker Hub"
              DH_README_SYNC_PATH="${TEMPDIR}/docker-${CONTAINER_NAME}/README.md"
            fi
            if curl -s https://hub.docker.com/v2/namespaces/${DOCKERHUB_IMAGE%%/*}/repositories/${DOCKERHUB_IMAGE##*/}/tags | jq -r '.message' | grep -q 404; then
              echo "Docker Hub endpoint doesn't exist. Creating endpoint first."
              DH_TOKEN=$(curl -d '{"username":"linuxserverci", "password":"'${DOCKERHUB_TOKEN}'"}' -H "Content-Type: application/json" -X POST https://hub.docker.com/v2/users/login | jq -r '.token')
              curl -s \
                -H "Authorization: JWT ${DH_TOKEN}" \
                -H "Content-Type: application/json" \
                -X POST \
                -d '{"name":"'${DOCKERHUB_IMAGE##*/}'", "namespace":"'${DOCKERHUB_IMAGE%%/*}'"}' \
                https://hub.docker.com/v2/repositories/ || :
            fi
            DH_TOKEN=$(curl -d '{"username":"linuxserverci", "password":"'${DOCKERHUB_TOKEN}'"}' -H "Content-Type: application/json" -X POST https://hub.docker.com/v2/users/login | jq -r '.token')
            curl -s \
              -H "Authorization: JWT ${DH_TOKEN}" \
              -H "Content-Type: application/json" \
              -X PATCH \
              -d "{\\"full_description\\":$(jq -Rsa . ${DH_README_SYNC_PATH})}" \
              https://hub.docker.com/v2/repositories/${DOCKERHUB_IMAGE} || :
          else
            echo "Not the default Github branch. Skipping readme sync to Docker Hub."
          fi
          rm -Rf ${TEMPDIR}'''
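        // The script above records true/false in /tmp/${COMMIT_SHA}-${BUILD_NUMBER}; read it back here so the Template-exit stage can abort the build when templated files were pushed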
        script{
          env.FILES_UPDATED = sh(
            script: '''cat /tmp/${COMMIT_SHA}-${BUILD_NUMBER}''',
            returnStdout: true).trim()
        }
      }
    }
    // Exit the build if the Templated files were just updated
    stage('Template-exit') {
      when {
        branch "develop"
        environment name: 'CHANGE_ID', value: ''
        environment name: 'FILES_UPDATED', value: 'true'
        expression {
          env.CONTAINER_NAME != null
        }
      }
      steps {
        script{
          env.EXIT_STATUS = 'ABORTED'
        }
      }
    }
    // If this is a develop build check the S6 service file perms
    stage("Check S6 Service file Permissions"){
      when {
        branch "develop"
        environment name: 'CHANGE_ID', value: ''
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
        script{
          sh '''#! /bin/bash
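            # Flag any s6 run/finish/check scripts that are not executable for user, group and other; a hit fails the build before a broken image is published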
            WRONG_PERM=$(find ./ -path "./.git" -prune -o \\( -name "run" -o -name "finish" -o -name "check" \\) -not -perm -u=x,g=x,o=x -print)
            if [[ -n "${WRONG_PERM}" ]]; then
              echo "The following S6 service files are missing the executable bit; canceling the faulty build: ${WRONG_PERM}"
              exit 1
            else
              echo "S6 service file perms look good."
            fi '''
        }
      }
    }
    /* #######################
       GitLab Mirroring and Quay.io Repo Visibility
       ####################### */
    // Ping GitLab to mirror this repo and provide a registry endpoint, and mark this repo as public on Quay.io
    stage("GitLab Mirror and Quay.io Visibility"){
      when {
        environment name: 'EXIT_STATUS', value: ''
      }
      steps{
        sh '''curl -H "Content-Type: application/json" -H "Private-Token: ${GITLAB_TOKEN}" -X POST https://gitlab.com/api/v4/projects \
          -d '{"namespace_id":'${GITLAB_NAMESPACE}',\
          "name":"'${LS_REPO}'",
          "mirror":true,\
          "import_url":"https://github.com/linuxserver/'${LS_REPO}'.git",\
          "issues_access_level":"disabled",\
          "merge_requests_access_level":"disabled",\
          "repository_access_level":"enabled",\
          "visibility":"public"}' '''
        sh '''curl -H "Private-Token: ${GITLAB_TOKEN}" -X PUT "https://gitlab.com/api/v4/projects/Linuxserver.io%2F${LS_REPO}" \
          -d "mirror=true&import_url=https://github.com/linuxserver/${LS_REPO}.git" '''
        sh '''curl -H "Content-Type: application/json" -H "Authorization: Bearer ${QUAYIO_API_TOKEN}" -X POST "https://quay.io/api/v1/repository${QUAYIMAGE/quay.io/}/changevisibility" \
          -d '{"visibility":"public"}' ||: '''
      }
    }
    /* ###############
       Build Container
       ############### */
    // Build Docker container for push to LS Repo
    stage('Build-Single') {
      when {
        expression {
          env.MULTIARCH == 'false' || params.PACKAGE_CHECK == 'true'
        }
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
        echo "Running on node: ${NODE_NAME}"
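        // Append ENV LSIO_FIRST_PARTY=true after each FROM line in the Dockerfile, marking the image as a first-party LinuxServer build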
        sh "sed -r -i 's|(^FROM .*)|\\1\\n\\nENV LSIO_FIRST_PARTY=true|g' Dockerfile"
        sh "docker buildx build \
          --label \"org.opencontainers.image.created=${GITHUB_DATE}\" \
          --label \"org.opencontainers.image.authors=linuxserver.io\" \
          --label \"org.opencontainers.image.url=https://github.com/linuxserver/docker-readarr/packages\" \
          --label \"org.opencontainers.image.documentation=https://docs.linuxserver.io/images/docker-readarr\" \
          --label \"org.opencontainers.image.source=https://github.com/linuxserver/docker-readarr\" \
          --label \"org.opencontainers.image.version=${EXT_RELEASE_CLEAN}-ls${LS_TAG_NUMBER}\" \
          --label \"org.opencontainers.image.revision=${COMMIT_SHA}\" \
          --label \"org.opencontainers.image.vendor=linuxserver.io\" \
          --label \"org.opencontainers.image.licenses=GPL-3.0-only\" \
          --label \"org.opencontainers.image.ref.name=${COMMIT_SHA}\" \
          --label \"org.opencontainers.image.title=Readarr\" \
          --label \"org.opencontainers.image.description=[Readarr](https://github.com/Readarr/Readarr) - Book Manager and Automation (Sonarr for Ebooks) \" \
          --no-cache --pull -t ${IMAGE}:${META_TAG} --platform=linux/amd64 \
          --provenance=true --sbom=true --builder=container --load \
          --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
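        // Tag the freshly built image for every endpoint in BUILDCACHE so the push and manifest stages can reference <endpoint>:amd64-${COMMIT_SHA}-${BUILD_NUMBER}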
        sh '''#! /bin/bash
          set -e
          IFS=',' read -ra CACHE <<< "$BUILDCACHE"
          for i in "${CACHE[@]}"; do
            docker tag ${IMAGE}:${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
          done
        '''
        withCredentials([
          [
            $class: 'UsernamePasswordMultiBinding',
            credentialsId: 'Quay.io-Robot',
            usernameVariable: 'QUAYUSER',
            passwordVariable: 'QUAYPASS'
          ]
        ]) {
          retry_backoff(5,5) {
            sh '''#! /bin/bash
              set -e
              echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
              echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
              echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
              echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
              if [[ "${PACKAGE_CHECK}" != "true" ]]; then
                IFS=',' read -ra CACHE <<< "$BUILDCACHE"
                for i in "${CACHE[@]}"; do
                  docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} &
                done
                for p in $(jobs -p); do
                  wait "$p" || { echo "job $p failed" >&2; exit 1; }
                done
              fi
            '''
          }
        }
      }
    }
    // Build MultiArch Docker containers for push to LS Repo
    stage('Build-Multi') {
      when {
        allOf {
          environment name: 'MULTIARCH', value: 'true'
          expression { params.PACKAGE_CHECK == 'false' }
        }
        environment name: 'EXIT_STATUS', value: ''
      }
      parallel {
        stage('Build X86') {
          steps {
            echo "Running on node: ${NODE_NAME}"
            sh "sed -r -i 's|(^FROM .*)|\\1\\n\\nENV LSIO_FIRST_PARTY=true|g' Dockerfile"
            sh "docker buildx build \
              --label \"org.opencontainers.image.created=${GITHUB_DATE}\" \
              --label \"org.opencontainers.image.authors=linuxserver.io\" \
              --label \"org.opencontainers.image.url=https://github.com/linuxserver/docker-readarr/packages\" \
              --label \"org.opencontainers.image.documentation=https://docs.linuxserver.io/images/docker-readarr\" \
              --label \"org.opencontainers.image.source=https://github.com/linuxserver/docker-readarr\" \
              --label \"org.opencontainers.image.version=${EXT_RELEASE_CLEAN}-ls${LS_TAG_NUMBER}\" \
              --label \"org.opencontainers.image.revision=${COMMIT_SHA}\" \
              --label \"org.opencontainers.image.vendor=linuxserver.io\" \
              --label \"org.opencontainers.image.licenses=GPL-3.0-only\" \
              --label \"org.opencontainers.image.ref.name=${COMMIT_SHA}\" \
              --label \"org.opencontainers.image.title=Readarr\" \
              --label \"org.opencontainers.image.description=[Readarr](https://github.com/Readarr/Readarr) - Book Manager and Automation (Sonarr for Ebooks) \" \
              --no-cache --pull -t ${IMAGE}:amd64-${META_TAG} --platform=linux/amd64 \
              --provenance=true --sbom=true --builder=container --load \
              --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
            sh '''#! /bin/bash
              set -e
              IFS=',' read -ra CACHE <<< "$BUILDCACHE"
              for i in "${CACHE[@]}"; do
                docker tag ${IMAGE}:amd64-${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
              done
            '''
            withCredentials([
              [
                $class: 'UsernamePasswordMultiBinding',
                credentialsId: 'Quay.io-Robot',
                usernameVariable: 'QUAYUSER',
                passwordVariable: 'QUAYPASS'
              ]
            ]) {
              retry_backoff(5,5) {
                sh '''#! /bin/bash
                  set -e
                  echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
                  echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
                  echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
                  echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
                  if [[ "${PACKAGE_CHECK}" != "true" ]]; then
                    IFS=',' read -ra CACHE <<< "$BUILDCACHE"
                    for i in "${CACHE[@]}"; do
                      docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} &
                    done
                    for p in $(jobs -p); do
                      wait "$p" || { echo "job $p failed" >&2; exit 1; }
                    done
                  fi
                '''
              }
            }
          }
        }
        stage('Build ARM64') {
          agent {
            label 'ARM64'
          }
          steps {
            echo "Running on node: ${NODE_NAME}"
            sh "sed -r -i 's|(^FROM .*)|\\1\\n\\nENV LSIO_FIRST_PARTY=true|g' Dockerfile.aarch64"
            sh "docker buildx build \
              --label \"org.opencontainers.image.created=${GITHUB_DATE}\" \
              --label \"org.opencontainers.image.authors=linuxserver.io\" \
              --label \"org.opencontainers.image.url=https://github.com/linuxserver/docker-readarr/packages\" \
              --label \"org.opencontainers.image.documentation=https://docs.linuxserver.io/images/docker-readarr\" \
              --label \"org.opencontainers.image.source=https://github.com/linuxserver/docker-readarr\" \
              --label \"org.opencontainers.image.version=${EXT_RELEASE_CLEAN}-ls${LS_TAG_NUMBER}\" \
              --label \"org.opencontainers.image.revision=${COMMIT_SHA}\" \
              --label \"org.opencontainers.image.vendor=linuxserver.io\" \
              --label \"org.opencontainers.image.licenses=GPL-3.0-only\" \
              --label \"org.opencontainers.image.ref.name=${COMMIT_SHA}\" \
              --label \"org.opencontainers.image.title=Readarr\" \
              --label \"org.opencontainers.image.description=[Readarr](https://github.com/Readarr/Readarr) - Book Manager and Automation (Sonarr for Ebooks) \" \
              --no-cache --pull -f Dockerfile.aarch64 -t ${IMAGE}:arm64v8-${META_TAG} --platform=linux/arm64 \
              --provenance=true --sbom=true --builder=container --load \
              --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
            sh '''#! /bin/bash
              set -e
              IFS=',' read -ra CACHE <<< "$BUILDCACHE"
              for i in "${CACHE[@]}"; do
                docker tag ${IMAGE}:arm64v8-${META_TAG} ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
              done
            '''
            withCredentials([
              [
                $class: 'UsernamePasswordMultiBinding',
                credentialsId: 'Quay.io-Robot',
                usernameVariable: 'QUAYUSER',
                passwordVariable: 'QUAYPASS'
              ]
            ]) {
              retry_backoff(5,5) {
                sh '''#! /bin/bash
                  set -e
                  echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
                  echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
                  echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
                  echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
                  if [[ "${PACKAGE_CHECK}" != "true" ]]; then
                    IFS=',' read -ra CACHE <<< "$BUILDCACHE"
                    for i in "${CACHE[@]}"; do
                      docker push ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} &
                    done
                    for p in $(jobs -p); do
                      wait "$p" || { echo "job $p failed" >&2; exit 1; }
                    done
                  fi
                '''
              }
            }
            sh '''#! /bin/bash
              containers=$(docker ps -aq)
              if [[ -n "${containers}" ]]; then
                docker stop ${containers}
              fi
              docker system prune -f --volumes || :
              docker image prune -af || :
            '''
          }
        }
      }
    }
    // Take the image we just built and dump package versions for comparison
    stage('Update-packages') {
      when {
        branch "develop"
        environment name: 'CHANGE_ID', value: ''
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
        sh '''#! /bin/bash
          set -e
          TEMPDIR=$(mktemp -d)
          if [ "${MULTIARCH}" == "true" ] && [ "${PACKAGE_CHECK}" != "true" ]; then
            LOCAL_CONTAINER=${IMAGE}:amd64-${META_TAG}
          else
            LOCAL_CONTAINER=${IMAGE}:${META_TAG}
          fi
          touch ${TEMPDIR}/package_versions.txt
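          # Use syft to dump the package list of the image that was just built; the first 8 characters of its md5 become the new package fingerprint compared against PACKAGE_TAG from git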
          docker run --rm \
            -v /var/run/docker.sock:/var/run/docker.sock:ro \
            -v ${TEMPDIR}:/tmp \
            ghcr.io/anchore/syft:v1.26.1 \
            ${LOCAL_CONTAINER} -o table=/tmp/package_versions.txt
          NEW_PACKAGE_TAG=$(md5sum ${TEMPDIR}/package_versions.txt | cut -c1-8 )
          echo "Package tag sha from current packages in built container is ${NEW_PACKAGE_TAG}, comparing to old ${PACKAGE_TAG} from github"
          if [ "${NEW_PACKAGE_TAG}" != "${PACKAGE_TAG}" ]; then
            git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/${LS_REPO}
            git --git-dir ${TEMPDIR}/${LS_REPO}/.git checkout -f develop
            cp ${TEMPDIR}/package_versions.txt ${TEMPDIR}/${LS_REPO}/
            cd ${TEMPDIR}/${LS_REPO}/
            wait
            git add package_versions.txt
            git commit -m 'Bot Updating Package Versions'
            git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git develop
            git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git develop
            echo "true" > /tmp/packages-${COMMIT_SHA}-${BUILD_NUMBER}
            echo "Package tag updated, stopping build process"
          else
            echo "false" > /tmp/packages-${COMMIT_SHA}-${BUILD_NUMBER}
            echo "Package tag is the same as the previous one, continuing with the build process"
          fi
          rm -Rf ${TEMPDIR}'''
        script{
          env.PACKAGE_UPDATED = sh(
            script: '''cat /tmp/packages-${COMMIT_SHA}-${BUILD_NUMBER}''',
            returnStdout: true).trim()
        }
      }
    }
    // Exit the build if the package file was just updated
    stage('PACKAGE-exit') {
      when {
        branch "develop"
        environment name: 'CHANGE_ID', value: ''
        environment name: 'PACKAGE_UPDATED', value: 'true'
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
        script{
          env.EXIT_STATUS = 'ABORTED'
        }
      }
    }
    // Exit the build if this is just a package check and there are no changes to push
    stage('PACKAGECHECK-exit') {
      when {
        branch "develop"
        environment name: 'CHANGE_ID', value: ''
        environment name: 'PACKAGE_UPDATED', value: 'false'
        environment name: 'EXIT_STATUS', value: ''
        expression {
          params.PACKAGE_CHECK == 'true'
        }
      }
      steps {
        script{
          env.EXIT_STATUS = 'ABORTED'
        }
      }
    }
    /* #######
       Testing
       ####### */
    // Run Container tests
    stage('Test') {
      when {
        environment name: 'CI', value: 'true'
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
        withCredentials([
          string(credentialsId: 'ci-tests-s3-key-id', variable: 'S3_KEY'),
          string(credentialsId: 'ci-tests-s3-secret-access-key', variable: 'S3_SECRET')
        ]) {
          script{
            env.CI_URL = 'https://ci-tests.linuxserver.io/' + env.IMAGE + '/' + env.META_TAG + '/index.html'
            env.CI_JSON_URL = 'https://ci-tests.linuxserver.io/' + env.IMAGE + '/' + env.META_TAG + '/report.json'
          }
          sh '''#! /bin/bash
            set -e
            if grep -q 'docker-baseimage' <<< "${LS_REPO}"; then
              echo "Detected baseimage, setting LSIO_FIRST_PARTY=true"
              if [ -n "${CI_DOCKERENV}" ]; then
                CI_DOCKERENV="LSIO_FIRST_PARTY=true|${CI_DOCKERENV}"
              else
                CI_DOCKERENV="LSIO_FIRST_PARTY=true"
              fi
            fi
            docker pull ghcr.io/linuxserver/ci:latest
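            # For multiarch builds, pull the arm64 image back from the ghcr buildcache and retag it locally so both tags listed in CI_TAGS can be tested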
            if [ "${MULTIARCH}" == "true" ]; then
              docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} --platform=arm64
              docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
            fi
            docker run --rm \
              --shm-size=1gb \
              -v /var/run/docker.sock:/var/run/docker.sock \
              -e IMAGE=\"${IMAGE}\" \
              -e DOCKER_LOGS_TIMEOUT=\"${CI_DELAY}\" \
              -e TAGS=\"${CI_TAGS}\" \
              -e META_TAG=\"${META_TAG}\" \
              -e RELEASE_TAG=\"develop\" \
              -e PORT=\"${CI_PORT}\" \
              -e SSL=\"${CI_SSL}\" \
              -e BASE=\"${DIST_IMAGE}\" \
              -e SECRET_KEY=\"${S3_SECRET}\" \
              -e ACCESS_KEY=\"${S3_KEY}\" \
              -e DOCKER_ENV=\"${CI_DOCKERENV}\" \
              -e WEB_SCREENSHOT=\"${CI_WEB}\" \
              -e WEB_AUTH=\"${CI_AUTH}\" \
              -e WEB_PATH=\"${CI_WEBPATH}\" \
              -e NODE_NAME=\"${NODE_NAME}\" \
              -t ghcr.io/linuxserver/ci:latest \
              python3 test_build.py'''
        }
      }
    }
    /* ##################
       Release Logic
       ################## */
    // If this is an amd64-only image, only push a single image
    stage('Docker-Push-Single') {
      when {
        environment name: 'MULTIARCH', value: 'false'
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
        retry_backoff(5,5) {
          sh '''#! /bin/bash
            set -e
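            # For each target registry, find the buildcache endpoint on the same registry and promote its amd64 image to the real tags via imagetools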
            for PUSHIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
              [[ ${PUSHIMAGE%%/*} =~ \\. ]] && PUSHIMAGEPLUS="${PUSHIMAGE}" || PUSHIMAGEPLUS="docker.io/${PUSHIMAGE}"
              IFS=',' read -ra CACHE <<< "$BUILDCACHE"
              for i in "${CACHE[@]}"; do
                if [[ "${PUSHIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then
                  CACHEIMAGE=${i}
                fi
              done
              docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${META_TAG} -t ${PUSHIMAGE}:develop -t ${PUSHIMAGE}:${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
              if [ -n "${SEMVER}" ]; then
                docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
              fi
            done
          '''
        }
      }
    }
    // If this is a multi-arch release, push all images and define the manifest
    stage('Docker-Push-Multi') {
      when {
        environment name: 'MULTIARCH', value: 'true'
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
        retry_backoff(5,5) {
          sh '''#! /bin/bash
            set -e
            for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
              [[ ${MANIFESTIMAGE%%/*} =~ \\. ]] && MANIFESTIMAGEPLUS="${MANIFESTIMAGE}" || MANIFESTIMAGEPLUS="docker.io/${MANIFESTIMAGE}"
              IFS=',' read -ra CACHE <<< "$BUILDCACHE"
              for i in "${CACHE[@]}"; do
                if [[ "${MANIFESTIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then
                  CACHEIMAGE=${i}
                fi
              done
              docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${META_TAG} -t ${MANIFESTIMAGE}:amd64-develop -t ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
              docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${META_TAG} -t ${MANIFESTIMAGE}:arm64v8-develop -t ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
              if [ -n "${SEMVER}" ]; then
                docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
                docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${SEMVER} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
              fi
            done
            for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
              docker buildx imagetools create -t ${MANIFESTIMAGE}:develop ${MANIFESTIMAGE}:amd64-develop ${MANIFESTIMAGE}:arm64v8-develop
              docker buildx imagetools create -t ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
              docker buildx imagetools create -t ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
              if [ -n "${SEMVER}" ]; then
                docker buildx imagetools create -t ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
              fi
            done
          '''
        }
      }
    }
    // If this is a public release, tag it in the LS GitHub
    stage('Github-Tag-Push-Release') {
      when {
        branch "develop"
        expression {
          env.LS_RELEASE != env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
        }
        environment name: 'CHANGE_ID', value: ''
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
        echo "Pushing New tag for current commit ${META_TAG}"
        sh '''curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/git/tags \
          -d '{"tag":"'${META_TAG}'",\
          "object": "'${COMMIT_SHA}'",\
          "message": "Tagging Release '${EXT_RELEASE_CLEAN}'-ls'${LS_TAG_NUMBER}' to develop",\
          "type": "commit",\
          "tagger": {"name": "LinuxServer-CI","email": "ci@linuxserver.io","date": "'${GITHUB_DATE}'"}}' '''
        echo "Pushing New release for Tag"
        sh '''#! /bin/bash
          echo "Updating to ${EXT_RELEASE_CLEAN}" > releasebody.json
          echo '{"tag_name":"'${META_TAG}'",\
          "target_commitish": "develop",\
          "name": "'${META_TAG}'",\
          "body": "**CI Report:**\\n\\n'${CI_URL:-N/A}'\\n\\n**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**Remote Changes:**\\n\\n' > start
          printf '","draft": false,"prerelease": true}' >> releasebody.json
          paste -d'\\0' start releasebody.json > releasebody.json.done
          curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/releases -d @releasebody.json.done'''
      }
    }
    // Add protection to the release branch
    stage('Github-Release-Branch-Protection') {
      when {
        branch "develop"
        environment name: 'CHANGE_ID', value: ''
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
        echo "Setting up protection for release branch develop"
        sh '''#! /bin/bash
          curl -H "Authorization: token ${GITHUB_TOKEN}" -X PUT https://api.github.com/repos/${LS_USER}/${LS_REPO}/branches/develop/protection \
          -d $(jq -c . << EOF
            {
              "required_status_checks": null,
              "enforce_admins": false,
              "required_pull_request_reviews": {
                "dismiss_stale_reviews": false,
                "require_code_owner_reviews": false,
                "require_last_push_approval": false,
                "required_approving_review_count": 1
              },
              "restrictions": null,
              "required_linear_history": false,
              "allow_force_pushes": false,
              "allow_deletions": false,
              "block_creations": false,
              "required_conversation_resolution": true,
              "lock_branch": false,
              "allow_fork_syncing": false,
              "required_signatures": false
            }
EOF
          ) '''
      }
    }
    // If this is a pull request, send the CI link as a comment on it
    stage('Pull Request Comment') {
      when {
        not {environment name: 'CHANGE_ID', value: ''}
        environment name: 'EXIT_STATUS', value: ''
      }
      steps {
        sh '''#! /bin/bash
          # Function to retrieve JSON data from URL
          get_json() {
            local url="$1"
            local response=$(curl -s "$url")
            if [ $? -ne 0 ]; then
              echo "Failed to retrieve JSON data from $url"
              return 1
            fi
            local json=$(echo "$response" | jq .)
            if [ $? -ne 0 ]; then
              echo "Failed to parse JSON data from $url"
              return 1
            fi
            echo "$json"
          }
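          # Render the per-tag pass/fail results from the CI report.json as a markdown table for the PR comment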
1055 | build_table() { |
||
1056 | local data="$1" |
||
1057 | |||
1058 | # Get the keys in the JSON data |
||
1059 | local keys=$(echo "$data" | jq -r 'to_entries | map(.key) | .[]') |
||
1060 | |||
1061 | # Check if keys are empty |
||
1062 | if [ -z "$keys" ]; then |
||
1063 | echo "JSON report data does not contain any keys or the report does not exist." |
||
1064 | return 1 |
||
1065 | fi |
||
1066 | |||
1067 | # Build table header |
||
1068 | local header="| Tag | Passed |\\n| --- | --- |\\n" |
||
1069 | |||
1070 | # Loop through the JSON data to build the table rows |
||
1071 | local rows="" |
||
1072 | for build in $keys; do |
||
1073 | local status=$(echo "$data" | jq -r ".[\\"$build\\"].test_success") |
||
1074 | if [ "$status" = "true" ]; then |
||
1075 | status="✅" |
||
1076 | else |
||
1077 | status="❌" |
||
1078 | fi |
||
1079 | local row="| "$build" | "$status" |\\n" |
||
1080 | rows="${rows}${row}" |
||
1081 | done |
||
1082 | |||
1083 | local table="${header}${rows}" |
||
1084 | local escaped_table=$(echo "$table" | sed 's/\"/\\\\"/g') |
||
1085 | echo "$escaped_table" |
||
1086 | } |
||
1087 | |||
1088 | if [[ "${CI}" = "true" ]]; then |
||
1089 | # Retrieve JSON data from URL |
||
1090 | data=$(get_json "$CI_JSON_URL") |
||
1091 | # Create table from JSON data |
||
1092 | table=$(build_table "$data") |
||
1093 | echo -e "$table" |
||
1094 | |||
1095 | curl -X POST -H "Authorization: token $GITHUB_TOKEN" \ |
||
1096 | -H "Accept: application/vnd.github.v3+json" \ |
||
1097 | "https://api.github.com/repos/$LS_USER/$LS_REPO/issues/$PULL_REQUEST/comments" \ |
||
1098 | -d "{\\"body\\": \\"I am a bot, here are the test results for this PR: \\n${CI_URL}\\n${SHELLCHECK_URL}\\n${table}\\"}" |
||
1099 | else |
||
1100 | curl -X POST -H "Authorization: token $GITHUB_TOKEN" \ |
||
1101 | -H "Accept: application/vnd.github.v3+json" \ |
||
1102 | "https://api.github.com/repos/$LS_USER/$LS_REPO/issues/$PULL_REQUEST/comments" \ |
||
1103 | -d "{\\"body\\": \\"I am a bot, here is the pushed image/manifest for this PR: \\n\\n\\`${GITHUBIMAGE}:${META_TAG}\\`\\"}" |
||
1104 | fi |
||
1105 | ''' |
||
1106 | |||
1107 | } |
||
1108 | } |
||
1109 | } |
||
1110 | /* ###################### |
||
1111 | Send status to Discord |
||
1112 | ###################### */ |
||
1113 | post { |
||
1114 | always { |
||
1115 | sh '''#!/bin/bash |
||
1116 | rm -rf /config/.ssh/id_sign |
||
1117 | rm -rf /config/.ssh/id_sign.pub |
||
1118 | git config --global --unset gpg.format |
||
1119 | git config --global --unset user.signingkey |
||
1120 | git config --global --unset commit.gpgsign |
||
1121 | ''' |
||
1122 | script{ |
||
1123 | env.JOB_DATE = sh( |
||
1124 | script: '''date '+%Y-%m-%dT%H:%M:%S%:z' ''', |
||
1125 | returnStdout: true).trim() |
||
1126 | if (env.EXIT_STATUS == "ABORTED"){ |
||
1127 | sh 'echo "build aborted"' |
||
1128 | }else{ |
||
1129 | if (currentBuild.currentResult == "SUCCESS"){ |
||
1130 | if (env.GITHUBIMAGE =~ /lspipepr/){ |
||
1131 | env.JOB_WEBHOOK_STATUS='Success' |
||
1132 | env.JOB_WEBHOOK_COLOUR=3957028 |
||
1133 | env.JOB_WEBHOOK_FOOTER='PR Build' |
||
1134 | }else if (env.GITHUBIMAGE =~ /lsiodev/){ |
||
1135 | env.JOB_WEBHOOK_STATUS='Success' |
||
1136 | env.JOB_WEBHOOK_COLOUR=3957028 |
||
1137 | env.JOB_WEBHOOK_FOOTER='Dev Build' |
||
1138 | }else{ |
||
1139 | env.JOB_WEBHOOK_STATUS='Success' |
||
1140 | env.JOB_WEBHOOK_COLOUR=1681177 |
||
1141 | env.JOB_WEBHOOK_FOOTER='Live Build' |
||
1142 | } |
||
1143 | }else{ |
||
1144 | if (env.GITHUBIMAGE =~ /lspipepr/){ |
||
1145 | env.JOB_WEBHOOK_STATUS='Failure' |
||
1146 | env.JOB_WEBHOOK_COLOUR=12669523 |
||
1147 | env.JOB_WEBHOOK_FOOTER='PR Build' |
||
1148 | }else if (env.GITHUBIMAGE =~ /lsiodev/){ |
||
1149 | env.JOB_WEBHOOK_STATUS='Failure' |
||
1150 | env.JOB_WEBHOOK_COLOUR=12669523 |
||
1151 | env.JOB_WEBHOOK_FOOTER='Dev Build' |
||
1152 | }else{ |
||
1153 | env.JOB_WEBHOOK_STATUS='Failure' |
||
1154 | env.JOB_WEBHOOK_COLOUR=16711680 |
||
1155 | env.JOB_WEBHOOK_FOOTER='Live Build' |
||
1156 | } |
||
1157 | } |
||
1158 | sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"'color'": '${JOB_WEBHOOK_COLOUR}',\ |
||
1159 | "footer": {"text" : "'"${JOB_WEBHOOK_FOOTER}"'"},\ |
||
1160 | "timestamp": "'${JOB_DATE}'",\ |
||
1161 | "description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** '${JOB_WEBHOOK_STATUS}'\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\ |
||
1162 | "username": "Jenkins"}' ${BUILDS_DISCORD} ''' |
||
1163 | } |
||
1164 | } |
||
1165 | } |
||
1166 | cleanup { |
||
1167 | sh '''#! /bin/bash |
||
1168 | echo "Pruning builder!!" |
||
1169 | docker builder prune -f --builder container || : |
||
1170 | containers=$(docker ps -q) |
||
1171 | if [[ -n "${containers}" ]]; then |
||
1172 | BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit') |
||
1173 | for container in ${containers}; do |
||
1174 | if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then |
||
1175 | echo "skipping buildx container in docker stop" |
||
1176 | else |
||
1177 | echo "Stopping container ${container}" |
||
1178 | docker stop ${container} |
||
1179 | fi |
||
1180 | done |
||
1181 | fi |
||
1182 | docker system prune -f --volumes || : |
||
1183 | docker image prune -af || : |
||
1184 | ''' |
||
1185 | cleanWs() |
||
1186 | } |
||
1187 | } |
||
1188 | } |
||
1189 | |||
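// Retry helper used by the build and push stages above: retry_backoff(5,5) runs the closure up to 5 times, sleeping power_base**n seconds between attempts (1, 5, 25, 125 for base 5)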
def retry_backoff(int max_attempts, int power_base, Closure c) {
  int n = 0
  while (n < max_attempts) {
    try {
      c()
      return
    } catch (err) {
      if ((n + 1) >= max_attempts) {
        throw err
      }
      sleep(power_base ** n)
      n++
    }
  }
  return
}