Compare commits
412 Commits
.cirrus.yml (81 lines changed)

@@ -5,18 +5,16 @@
 # Global environment variables
 env:
     # Name of the typical destination branch for PRs.
-    DEST_BRANCH: "master"
+    DEST_BRANCH: "main"


+# Execute all unit-tests in the repo
+cirrus-ci/unit-test_task:
+    only_if: &not_docs $CIRRUS_CHANGE_TITLE !=~ '.*CI:DOCS.*'
     # Default task runtime environment
-    container:
+    container: &ci_container
         dockerfile: ci/Dockerfile
         cpu: 1
         memory: 1


-# Execute all unit-tests in the repo
-cirrus-ci/test_task:
     env:
         CIRRUS_CLONE_DEPTH: 0
     script:

@@ -26,11 +24,70 @@ cirrus-ci/test_task:
     test_output_artifacts:
         path: '*.log'

+cirrus-ci/renovate_validation_task:
+    only_if: *not_docs
+    container:
+        image: "ghcr.io/renovatebot/renovate:latest"
+    preset_validate_script:
+        - renovate-config-validator $CIRRUS_WORKING_DIR/renovate/defaults.json5
+    repo_validate_script:
+        - renovate-config-validator $CIRRUS_WORKING_DIR/.github/renovate.json5
+
+# This is the same setup as used for Buildah CI
+gcp_credentials: ENCRYPTED[fc95bcc9f4506a3b0d05537b53b182e104d4d3979eedbf41cf54205be6397ca0bce0831d0d47580cf578dae5776548a5]
+
+cirrus-ci/build-push_test_task:
+    only_if: *not_docs
+    container: *ci_container
+    depends_on:
+        - cirrus-ci/unit-test
+    gce_instance:
+        cpu: 2
+        memory: "4Gb"
+        disk: 200  # Gigabytes, do not set less as per gcloud warning message
+                   # re: I/O performance
+        # This repo. is subsequently used in and for building custom VM images
+        # in containers/automation_images. Avoid circular dependencies by using
+        # only stock, google-managed generic image. This also avoids needing to
+        # update custom-image last-used timestamps.
+        image_project: centos-cloud
+        image_family: centos-stream-9
+    timeout_in: 30
+    env:
+        CIMG: quay.io/buildah/stable:latest
+        TEST_FQIN: quay.io/buildah/do_not_use
+        # Robot account credentials for test-push to
+        # $TEST_FQIN registry by build-push/test/testbuilds.sh
+        BUILDAH_USERNAME: ENCRYPTED[53fd8becb599dda19f335d65cb067c46da3f0907eb83281a10554def11efc89925f7ca145ba7436afc3c32d936575142]
+        BUILDAH_PASSWORD: ENCRYPTED[aa6352251eba46e389e4cfc6e93eee3852008ecff67b940cba9197fd8bf95de15d498a6df2e7d5edef052e97d9b93bf0]
+    setup_script:
+        - dnf install -y podman
+        - bash build-push/test/qemusetup.sh
+        - >-
+          podman run --detach --name=buildah
+          --net=host --ipc=host --pid=host
+          --cgroupns=host --privileged
+          --security-opt label=disable
+          --security-opt seccomp=unconfined
+          --device /dev/fuse:rw
+          -v $PWD:$PWD:Z -w $PWD
+          -e BUILD_PUSH_TEST_BUILDS=true
+          -e CIRRUS_CI -e TEST_FQIN
+          -e BUILDAH_USERNAME -e BUILDAH_PASSWORD
+          $CIMG
+          sh -c 'while true ;do sleep 2h ; done'
+        - podman exec -i buildah dnf install -y jq skopeo
+    test_script:
+        - podman exec -i buildah ./build-push/test/run_all_tests.sh
+
+
 # Represent primary Cirrus-CI based testing (Required for merge)
 cirrus-ci/success_task:
-    depends_on:
-        - cirrus-ci/test
+    container: *ci_container
+    depends_on: &everything
+        - cirrus-ci/unit-test
+        - cirrus-ci/build-push_test
+        - cirrus-ci/renovate_validation
     clone_script: mkdir -p "$CIRRUS_WORKING_DIR"
     script: >-
         echo "Required for Action Workflow: https://github.com/${CIRRUS_REPO_FULL_NAME}/actions/runs/${GITHUB_CHECK_SUITE_ID}"

@@ -43,15 +100,15 @@ cirrus-ci/success_task:
 # fire since the manual task has dependencies that cannot be
 # satisfied.
 github-actions/success_task:
+    container: *ci_container
     # Note: ***DO NOT*** manually trigger this task under normal circumstances.
     # It is triggered automatically by the cirrus-ci_retrospective
     # Github Action. This action is responsible for testing the PR changes
     # to the action itself.
     trigger_type: manual
     # Only required for PRs, never tag or branch testing
-    only_if: $CIRRUS_BRANCH != $DEST_BRANCH
-    depends_on:
-        - cirrus-ci/test
+    only_if: $CIRRUS_CHANGE_TITLE !=~ '.*CI:DOCS.*' && $CIRRUS_PR != ''
+    depends_on: *everything
     clone_script: mkdir -p "$CIRRUS_WORKING_DIR"
     script: >-
         echo "Triggered by Github Action Workflow: https://github.com/${CIRRUS_REPO_FULL_NAME}/actions/runs/${GITHUB_CHECK_SUITE_ID}"
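The `.cirrus.yml` rewrite above leans heavily on YAML anchors and aliases (`&not_docs`/`*not_docs`, `&ci_container`/`*ci_container`, `&everything`) so a value is defined once and reused across tasks. A minimal sketch of how an alias resolves, assuming `python3` with PyYAML is available; the task names in the embedded document are invented for the demo:

```bash
#!/bin/bash
# An anchor (&name) labels a node; every alias (*name) reuses that node
# verbatim, which is how the config above shares one container definition
# across several tasks without copy/paste.
python3 - <<'EOF'
import yaml

doc = """
a_task:
  container: &ci_container
    dockerfile: ci/Dockerfile
    cpu: 1
b_task:
  container: *ci_container   # resolves to the same mapping as a_task's
"""
parsed = yaml.safe_load(doc)
assert parsed["a_task"]["container"] == parsed["b_task"]["container"]
print(parsed["b_task"]["container"])  # {'dockerfile': 'ci/Dockerfile', 'cpu': 1}
EOF
```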
@@ -0,0 +1,45 @@
+/*
+    Renovate is a service similar to GitHub Dependabot, but with
+    (fantastically) more configuration options. So many options
+    in fact, if you're new I recommend glossing over this cheat-sheet
+    prior to the official documentation:
+
+    https://www.augmentedmind.de/2021/07/25/renovate-bot-cheat-sheet
+
+    Configuration Update/Change Procedure:
+    1. Make changes
+    2. Manually validate changes (from repo-root):
+
+       podman run -it \
+           -v ./.github/renovate.json5:/usr/src/app/renovate.json5:z \
+           ghcr.io/renovatebot/renovate:latest \
+           renovate-config-validator
+    3. Commit.
+
+    Configuration Reference:
+    https://docs.renovatebot.com/configuration-options/
+
+    Monitoring Dashboard:
+    https://app.renovatebot.com/dashboard#github/containers
+
+    Note: The Renovate bot will create/manage it's business on
+          branches named 'renovate/*'. Otherwise, and by
+          default, the only the copy of this file that matters
+          is the one on the `main` branch. No other branches
+          will be monitored or touched in any way.
+*/
+
+{
+    /*************************************************
+     ****** Global/general configuration options *****
+     *************************************************/
+    "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+    // Re-use predefined sets of configuration options to DRY
+    "extends": [
+        // https://github.com/containers/automation/blob/main/renovate/defaults.json5
+        "github>containers/automation//renovate/defaults.json5"
+    ],
+    /*************************************************
+     *** Repository-specific configuration options ***
+     *************************************************/
+}
@@ -16,11 +16,11 @@ env:
   ACTIONS_STEP_DEBUG: '${{ secrets.ACTIONS_STEP_DEBUG }}'

 jobs:
-  unit-test:
+  helper_unit-test:
     runs-on: ubuntu-latest
     steps:
       - name: Clone the repository code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
         with:
           persist-credentials: false
           path: ./

@@ -43,20 +43,20 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ github.token }}

-      - name: Clone latest master branch repository code
-        uses: actions/checkout@v2
+      - name: Clone latest main branch repository code
+        uses: actions/checkout@v4
         with:
           fetch-depth: 1
-          path: ./master
+          path: ./main
           # DO NOT build-in any unnecessary permissions
           persist-credentials: 'false'

       - name: Load cirrus-ci_retrospective JSON and set action output variables
         id: retro
         env:
-          DEBUG: 1
+          A_DEBUG: 1
         run: |
-          source ./master/$HELPER_LIB
+          source ./main/$HELPER_LIB
           load_ccir $GITHUB_WORKSPACE
           set_ccir
@@ -64,27 +64,25 @@ jobs:
       - if: steps.retro.outputs.do_intg == 'true'
         id: create_pr_comment
         name: Create a status comment in the PR
-        # Ref: https://github.com/marketplace/actions/comment-action
-        uses: jungwinter/comment@v1
+        uses: thollander/actions-comment-pull-request@v3
         with:
-          issue_number: '${{ steps.retro.outputs.prn }}'
-          type: 'create'
-          token: '${{ secrets.GITHUB_TOKEN }}'
+          pr-number: '${{ steps.retro.outputs.prn }}'
+          comment-tag: retro
           # N/B: At the time of this comment, it is not possible to provide
           # direct links to specific job-steps (here) nor links to artifact
           # files. There are open RFE's for this capability to be added.
-          body: >-
+          message: >-
             [Cirrus-CI Retrospective Github
             Action](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})
             has started. Running against
             [${{ steps.retro.outputs.sha }}](https://github.com/${{github.repository}}/pull/${{steps.retro.outputs.prn}}/commits/${{steps.retro.outputs.sha}})
             in this pull request.

-      # Since we're executing from the master branch, github will silently
+      # Since we're executing from the main branch, github will silently
       # block allow direct checkout of PR code.
       - if: steps.retro.outputs.do_intg == 'true'
         name: Clone all repository code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
         with:
           # Get ALL available history to avoid problems during any run of
           # 'git describe' from any script in the repo.

@@ -94,7 +92,7 @@ jobs:
           # DO NOT build-in any unnecessary permissions
           persist-credentials: 'false'

-      # This workflow always runs from the master branch, this is not helpful
+      # This workflow always runs from the main branch, this is not helpful
       # for PR authors wanting to change the container or script's behavior.
       # Clone down a copy of the code from the PR, so it may be utilized for
       # a test-build and secondary execution of cirrus-ci_retrospective

@@ -119,12 +117,11 @@ jobs:
       - if: steps.retro.outputs.do_intg == 'true'
         id: edit_pr_comment_build
         name: Update status comment on PR
-        uses: jungwinter/comment@v1
+        uses: thollander/actions-comment-pull-request@v3
         with:
-          type: 'edit'
-          comment_id: '${{ steps.create_pr_comment.outputs.id }}'
-          token: '${{ secrets.GITHUB_TOKEN }}'
-          body: >-
+          pr-number: '${{ steps.retro.outputs.prn }}'
+          comment-tag: retro
+          message: >-
             Unit-testing passed (`${{ env.HELPER_LIB_TEST }}`)passed.
             [Cirrus-CI Retrospective Github
             Action](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})

@@ -135,12 +132,11 @@ jobs:
       - if: steps.retro.outputs.do_intg == 'true'
         id: edit_pr_comment_exec
         name: Update status comment on PR again
-        uses: jungwinter/comment@v1
+        uses: thollander/actions-comment-pull-request@v3
         with:
-          type: 'edit'
-          comment_id: '${{ steps.edit_pr_comment_build.outputs.id }}'
-          token: '${{ secrets.GITHUB_TOKEN }}'
-          body: >-
+          pr-number: '${{ steps.retro.outputs.prn }}'
+          comment-tag: retro
+          message: >-
             Smoke testing passed [Cirrus-CI Retrospective Github
             Action](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})
             is triggering Cirrus-CI ${{ env.ACTION_TASK }} task.
@@ -154,12 +150,12 @@ jobs:
         run: |
           set +x
           trap "history -c" EXIT
-          curl --request POST \
+          curl --fail-with-body --request POST \
            --url https://api.cirrus-ci.com/graphql \
            --header "Authorization: Bearer ${{ secrets.CIRRUS_API_TOKEN }}" \
            --header 'content-type: application/json' \
            --data '{"query":"mutation {\n trigger(input: {taskId: \"${{steps.retro.outputs.tid}}\", clientMutationId: \"${{env.UUID}}\"}) {\n clientMutationId\n task {\n name\n }\n }\n}"}' \
-           > ./test_artifacts/action_task_trigger.json
+           | tee ./test_artifacts/action_task_trigger.json

           actual=$(jq --raw-output '.data.trigger.clientMutationId' ./test_artifacts/action_task_trigger.json)
           echo "Verifying '$UUID' matches returned tracking value '$actual'"
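The hunk above swaps a plain `>` redirection for `| tee`, so the GraphQL response shows up in the CI log while still being saved, and adds `--fail-with-body` so HTTP errors fail the step instead of silently producing a bad JSON file. A minimal sketch of the same pattern, assuming curl ≥ 7.76.0 (where `--fail-with-body` was introduced) and network access; the URL and output path are stand-ins:

```bash
#!/bin/bash
set -euo pipefail

response_file=./test_artifacts/response.json
mkdir -p "$(dirname "$response_file")"

# --fail-with-body: exit non-zero on HTTP >= 400, but still print the
# response body (plain --fail discards it), so 'tee' records either way.
curl --fail-with-body --silent --show-error \
    --url https://httpbin.org/json \
    | tee "$response_file"

# The saved copy can then be inspected, e.g. with jq:
jq . "$response_file" >/dev/null && echo "valid JSON captured"
```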
@@ -167,12 +163,11 @@ jobs:
       - if: steps.retro.outputs.do_intg == 'true'
         name: Update comment on workflow success
-        uses: jungwinter/comment@v1
+        uses: thollander/actions-comment-pull-request@v3
         with:
-          type: 'edit'
-          comment_id: '${{ steps.edit_pr_comment_exec.outputs.id }}'
-          token: '${{ secrets.GITHUB_TOKEN }}'
-          body: >-
+          pr-number: '${{ steps.retro.outputs.prn }}'
+          comment-tag: retro
+          message: >-
             Successfully triggered [${{ env.ACTION_TASK }}
             task](https://cirrus-ci.com/task/${{ steps.retro.outputs.tid }}?command=main#L0)
             to indicate

@@ -183,12 +178,11 @@ jobs:
       - if: failure() && steps.retro.outputs.do_intg == 'true'
         name: Update comment on workflow failure
-        uses: jungwinter/comment@v1
+        uses: thollander/actions-comment-pull-request@v3
         with:
-          type: 'edit'
-          comment_id: '${{ steps.create_pr_comment.outputs.id }}'
-          token: '${{ secrets.GITHUB_TOKEN }}'
-          body: >-
+          pr-number: '${{ steps.retro.outputs.prn }}'
+          comment-tag: retro
+          message: >-
             Failure running [Cirrus-CI Retrospective Github
             Action](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})
             failed against this PR's

@@ -197,24 +191,22 @@ jobs:
       # This can happen because of --force push, manual cancel button press, or some other cause.
       - if: cancelled() && steps.retro.outputs.do_intg == 'true'
         name: Update comment on workflow cancellation
-        uses: jungwinter/comment@v1
+        uses: thollander/actions-comment-pull-request@v3
         with:
-          type: 'edit'
-          comment_id: '${{ steps.create_pr_comment.outputs.id }}'
-          token: '${{ secrets.GITHUB_TOKEN }}'
-          body: '[Cancelled](https://github.com/${{github.repository}}/pull/${{steps.retro.outputs.prn}}/commits/${{steps.retro.outputs.sha}})'
+          pr-number: '${{ steps.retro.outputs.prn }}'
+          comment-tag: retro
+          message: '[Cancelled](https://github.com/${{github.repository}}/pull/${{steps.retro.outputs.prn}}/commits/${{steps.retro.outputs.sha}})'

       # Abnormal workflow ($ACTION-TASK task already ran / not paused on a PR).
       - if: steps.retro.outputs.is_pr == 'true' && steps.retro.outputs.do_intg != 'true'
         id: create_error_pr_comment
         name: Create an error status comment in the PR
         # Ref: https://github.com/marketplace/actions/comment-action
-        uses: jungwinter/comment@v1
+        uses: thollander/actions-comment-pull-request@v3
         with:
-          issue_number: '${{ steps.retro.outputs.prn }}'
-          type: 'create'
-          token: '${{ secrets.GITHUB_TOKEN }}'
-          body: >-
+          pr-number: '${{ steps.retro.outputs.prn }}'
+          comment-tag: error
+          message: >-
             ***ERROR***: [cirrus-ci_retrospective
             action](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})
             found `${{ env.ACTION_TASK }}` task with unexpected `${{ steps.retro.outputs.tst }}`

@@ -230,7 +222,7 @@ jobs:
       # Provide an archive of files for debugging/analysis.
       - if: always() && steps.retro.outputs.do_intg == 'true'
         name: Archive event, build, and debugging output
-        uses: actions/upload-artifact@v1.0.0
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pr_${{ steps.retro.outputs.prn }}_debug.zip
           path: ./test_artifacts
@@ -27,16 +27,21 @@ jobs:
           exit 1
         fi

-  unit-tests:
-    runs-on: ubuntu-latest
+  unit-tests:  # N/B: Duplicates `ubuntu_unit_tests.yml` - templating not supported
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
         with:
           # Testing installer requires a full repo. history
           fetch-depth: 0
           persist-credentials: false
           path: ./

+      - name: Install dependencies
+        run: |
+          sudo apt-get -qq update
+          sudo apt-get -qq -y install libtest-differences-perl libyaml-libyaml-perl
+
       - name: Execute helper library unit-tests using code from PR
         run: |
           $GITHUB_WORKSPACE/$HELPER_LIB_TEST

@@ -61,18 +66,18 @@ jobs:
       # context data.
       - id: get_tag
         name: Retrieve the tag name
-        run: printf "::set-output name=TAG_NAME::%s\n" $(basename "$GITHUB_REF")
+        run: printf "TAG_NAME=%s\n" $(basename "$GITHUB_REF") >> $GITHUB_OUTPUT

       - id: create_release  # Pre-req for upload-release-asset below
         name: Create a new Github Release item for tag
-        uses: actions/create-release@v1.0.1
+        uses: actions/create-release@v1.1.4
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           tag_name: ${{ steps.get_tag.outputs.TAG_NAME }}
           release_name: ${{ steps.get_tag.outputs.TAG_NAME }}

-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
           path: ./

@@ -97,7 +102,7 @@ jobs:
       REPO_USER: libpod
       REPO_NAME: cirrus-ci_retrospective
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
           path: ./

@@ -123,7 +128,7 @@ jobs:

       - name: Retrieve the tag name
         id: get_tag
-        run: printf "::set-output name=TAG_NAME::%s\n" $(basename "$GITHUB_REF" | tee /dev/stderr)
+        run: printf "TAG_NAME=%s\n" $(basename "$GITHUB_REF" | tee /dev/stderr) >> $GITHUB_OUTPUT

       - name: Tag and push cirrus-ci_retrospective container image to registry
         run: |

@@ -140,7 +145,7 @@ jobs:
         run: jq --indent 4 --color-output . ${{ github.event_path }}

       - if: always()
-        uses: actions/upload-artifact@v1.0.0
+        uses: actions/upload-artifact@v4.6.2
         name: Archive triggering event JSON
         with:
           name: event.json.zip
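The two `get_tag` hunks above migrate from the deprecated `::set-output` workflow command to the `$GITHUB_OUTPUT` file that GitHub Actions now provides to every step. A minimal sketch of the difference, runnable outside Actions by pointing `GITHUB_OUTPUT` at a scratch file; the `GITHUB_REF` value is a placeholder:

```bash
#!/bin/bash
GITHUB_OUTPUT="${GITHUB_OUTPUT:-$(mktemp)}"
GITHUB_REF="refs/tags/v2.0.0"   # placeholder for illustration

# Old style (deprecated by GitHub, parsed from stdout):
#   printf "::set-output name=TAG_NAME::%s\n" "$(basename "$GITHUB_REF")"
# New style (KEY=VALUE lines appended to the $GITHUB_OUTPUT file):
printf "TAG_NAME=%s\n" "$(basename "$GITHUB_REF")" >> "$GITHUB_OUTPUT"

cat "$GITHUB_OUTPUT"   # -> TAG_NAME=v2.0.0
```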
@@ -0,0 +1,24 @@
+---
+
+on: [push, pull_request]
+
+jobs:
+  automation_unit-tests:
+    runs-on: ubuntu-24.04
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          persist-credentials: false
+          path: ./
+
+      - name: Install dependencies
+        run: |
+          sudo apt-get -qq update
+          sudo apt-get -qq -y install libtest-differences-perl libyaml-libyaml-perl
+
+      - name: Fetch all repository tags
+        run: git fetch --tags --force
+
+      - name: Execute all unit-tests
+        run: $GITHUB_WORKSPACE/bin/run_all_tests.sh
@@ -0,0 +1 @@
+__pycache__
@@ -1,3 +1,3 @@
 ## The Automation Scrips for Containers Project Community Code of Conduct

-The Automation Scrips for Containers Project follows the [Containers Community Code of Conduct](https://github.com/containers/common/blob/master/CODE-OF-CONDUCT.md).
+The Automation Scrips for Containers Project follows the [Containers Community Code of Conduct](https://github.com/containers/common/blob/main/CODE-OF-CONDUCT.md).
README.md (144 lines changed)

@@ -1,38 +1,5 @@
 # Automation scripts, libraries for re-use in other repositories

-## Usage
-
-During build of an environment (VM, container image, etc), execute *any version*
-of [the install
-script](https://github.com/containers/automation/releases/download/latest/install_automation.sh),
-preferably as root. The script ***must*** be passed the version number of [the project
-release to install](https://github.com/containers/automation/releases). Before making
-changes to the environment, the script will first download and then re-execute
-the requested version of itself.
-
-For example, to install the `v1.0.0` release, run:
-```sh
-url='https://github.com/containers/automation/releases/latest/download/install_automation.sh'
-curl -sL "$url" | bash -s 1.1.3
-```
-
-The basic install consists of copying the contents of the `common` (subdirectory) and
-the installer script into a central location on the system. A global shell variable
-(`$AUTOMATION_LIB_PATH`) is set so that any dependent scripts can easily access the
-installed files.
-
-## Alt. Usage
-
-If a clone of the repository is already available locally, the installer can be invoked
-with the magic version number '0.0.0'. Note that, while it will install the files
-from the local clone as-is, the installer still needs to reach out to github to
-retrieve tree-history details. This is required for the installer to properly
-set the actual version-number as part of the process.
-
-Though not recommended at all, it is also possible to specify the version as
-`latest`. This will clone down whatever happens to be on the master branch
-at the time. Though it will probably work, it's best for stability to specify
-an explicit released version.
-
 ## Dependencies

@@ -44,6 +11,82 @@ system packages (or their equivalents):
 * git
 * install

+
+## Installation
+
+During build of an environment (VM, container image, etc), execute *any version*
+of [the install
+script](https://github.com/containers/automation/releases/download/latest/install_automation.sh),
+preferably as root. The script ***must*** be passed the version number of [the project
+release to install](https://github.com/containers/automation/releases). Alternatively
+it may be passed `latest` to install the HEAD of the main branch.
+
+For example, to install the `v1.1.3` release, run:
+```bash
+~# url='https://raw.githubusercontent.com/containers/automation/master/bin/install_automation.sh'
+~# curl -sL "$url" | bash -s 1.1.3
+```
+
+To install `latest`, run:
+```bash
+~# url='https://raw.githubusercontent.com/containers/automation/master/bin/install_automation.sh'
+~# curl -sL "$url" | bash -s latest
+```
+
+### Alt. Installation
+
+If you're leery of piping to bash and/or a local clone of the repository is already
+available locally, the installer can be invoked with the *magic version* '0.0.0'.
+Note this will limit the install to the local clone (as-is). The installer script
+will still reach out to github.com to retrieve version information. For example:
+
+```bash
+~# cd /path/to/clone
+/path/to/clone# ./bin/install_automation.sh 0.0.0
+```
+
+### Component installation
+
+The installer may also be passed the names of one or more components to
+install system-wide. Available components are simply any subdirectory in the repo
+which contain a `.install.sh` file. For example, to install the latest `build-push` system-wide run:
+
+```bash
+~# url='https://raw.githubusercontent.com/containers/automation/master/bin/install_automation.sh'
+~# curl -sL "$url" | bash -s latest build-push
+```
+
+## Usage
+
+The basic install consists of copying the contents of the `common` (subdirectory) and
+the installer script into a central location on the system. Because this location
+can vary by platform, a global shell variable `$AUTOMATION_LIB_PATH` is established
+by a central configuration at install-time. It is highly recommended that all
+callers explicitly load and export the contents of the file
+`/etc/automation_environment` before making use of the common library or any
+components. For example:
+
+```bash
+#!/bin/bash
+
+set -a
+if [[ -r "/etc/automation_environment" ]]; then
+    source /etc/automation_environment
+fi
+set +a
+
+if [[ -n "$AUTOMATION_LIB_PATH" ]]; then
+    source $AUTOMATION_LIB_PATH/common_lib.sh
+else
+    (
+    echo "WARNING: It doesn't appear containers/automation common was installed."
+    ) >> /dev/stderr
+fi
+
+...do stuff...
+```
+
 ## Subdirectories

 ### `.github/workflows`

@@ -52,16 +95,45 @@ Directory containing workflows for Github Actions.

 ### `bin`

-Ths directory contains scripts intended for execution under multiple environments,
+This directory contains scripts intended for execution under multiple environments,
 pertaining to operations on this whole repository. For example, executing all
 unit tests, installing components, etc.

+### `build-push`
+
+Handy automation too to help with parallel building and pushing container images,
+including support for multi-arch (via QEMU emulation). See the
+[README.md file in the subdirectory](build-push/README.md) for more information.
+
+### `cirrus-ci_artifacts`
+
+Handy python script that may be used to download artifacts from any build,
+based on knowing its ID. Downloads will be stored properly nested, by task
+name and artifact so there are no name clashes.
+
+### `cirrus-ci_env`
+
+Python script used to minimally parse `.cirrus.yml` tasks as written/formatted
+in other containers projects. This is not intended to be used directly, but
+called by other scripts to help extract env. var. values from matrix tasks.
+
+### `cirrus-ci_retrospective`
+
+See the [README.md file in the subdirectory](cirrus-ci_retrospective/README.md) for more information.
+
+### `cirrus-task-map`
+
+Handy script that parses a `.cirrus.yml` and outputs an flow-diagram to illustrate
+task dependencies. Useful for visualizing complex configurations, like that of
+`containers/podman`.
+
 ### `common`

 This directory contains general-purpose scripts, libraries, and their unit-tests.
 They're intended to be used individually or as a whole from within automation of
 other repositories.

-### `cirrus-ci_retrospective`
+### `github`

-See the [README.md file in the subdirectory](cirrus-ci_retrospective/README.md) for more information
+Contains some helper scripts/libraries for using `cirrus-ci_retrospective` from
+within github-actions workflow. Not intended to be used otherwise.
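Per the new "Component installation" section above, a component is just a repository subdirectory that contains a `.install.sh` file. A quick sketch for listing install candidates from a local clone; the clone path is a placeholder and GNU findutils is assumed:

```bash
#!/bin/bash
# List every top-level subdirectory that ships a '.install.sh', i.e. the
# names the installer would accept as component arguments.
clone=/path/to/clone   # placeholder
find "$clone" -mindepth 2 -maxdepth 2 -name '.install.sh' -printf '%h\n' \
    | xargs -rn1 basename \
    | sort
```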
@@ -1,3 +1,3 @@
 ## Security and Disclosure Information Policy for the Automation Scripts for Containers Project

-The Automation Scripts for Containers Project follows the [Security and Disclosure Information Policy](https://github.com/containers/common/blob/master/SECURITY.md) for the Containers Projects.
+The Automation Scripts for Containers Project follows the [Security and Disclosure Information Policy](https://github.com/containers/common/blob/main/SECURITY.md) for the Containers Projects.
@@ -15,7 +15,7 @@ set +x
 # install

 AUTOMATION_REPO_URL=${AUTOMATION_REPO_URL:-https://github.com/containers/automation.git}
-AUTOMATION_REPO_BRANCH=${AUTOMATION_REPO_BRANCH:-master}
+AUTOMATION_REPO_BRANCH=${AUTOMATION_REPO_BRANCH:-main}
 # This must be hard-coded for executing via pipe to bash
 SCRIPT_FILENAME=install_automation.sh
 # When non-empty, contains the installation source-files

@@ -24,7 +24,7 @@ INSTALLATION_SOURCE="${INSTALLATION_SOURCE:-}"
 AUTOMATION_VERSION="$1"
 shift || true  # ignore if no more args
 # Set non-zero to enable
-DEBUG=${DEBUG:-0}
+A_DEBUG=${A_DEBUG:-0}
 # Save some output eyestrain (if script can be found)
 OOE=$(realpath $(dirname "${BASH_SOURCE[0]}")/../common/bin/ooe.sh 2>/dev/null || echo "")
 # Sentinel value representing whatever version is present in the local repository
@@ -33,14 +33,32 @@ MAGIC_LOCAL_VERSION='0.0.0'
 DEFAULT_INSTALL_PREFIX=/usr/local/share
 INSTALL_PREFIX="${INSTALL_PREFIX:-$DEFAULT_INSTALL_PREFIX}"
 INSTALL_PREFIX="${INSTALL_PREFIX%%/}"  # Make debugging path problems easier
+# When installing as root, allow sourcing env. vars. from this file
+INSTALL_ENV_FILEPATH="${INSTALL_ENV_FILEPATH:-/etc/automation_environment}"
 # Used internally here and in unit-testing, do not change without a really, really good reason.
-_ARGS="$@"
+_ARGS="$*"
 _MAGIC_JUJU=${_MAGIC_JUJU:-XXXXX}
 _DEFAULT_MAGIC_JUJU=d41d844b68a14ee7b9e6a6bb88385b4d

-msg() { echo -e "${1:-No Message given}" > /dev/stderr; }
+msg() { echo -e "${1:-No Message given}"; }

-dbg() { if ((DEBUG)); then msg "\n# $1"; fi }
+dbg() { if ((A_DEBUG)); then msg "\n# $1"; fi }

+# On 5/14/2021 the default branch was renamed to 'main'.
+# Since prior versions of the installer reference the old
+# default branch, the version-specific installer could fail.
+# Work around this with some inline editing of the downloaded
+# script, before re-exec()ing it.
+fix_branch_ref() {
+    local filepath="$1"
+    if [[ ! -w "$filepath" ]]; then
+        msg "Error updating default branch name in installer script at '$filepath'"
+        exit 19
+    fi
+    sed -i -r -e \
+        's/^(AUTOMATION_REPO_BRANCH.+)master/\1main/' \
+        "$filepath"
+}
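The `_ARGS="$@"` to `_ARGS="$*"` change above is worth a closer look: assigning `"$@"` to a plain (non-array) variable flattens the positional parameters anyway, and shellcheck flags it (SC2124); `"$*"` states that flattening intent explicitly, joining arguments with the first character of `IFS`. A minimal sketch of the distinction:

```bash
#!/bin/bash
set -- one "two words" three

args_star="$*"   # deliberate flattening: one space-joined string
printf 'star: <%s>\n' "$args_star"        # -> <one two words three>

# By contrast, "$@" preserves word boundaries, but only in list contexts:
for a in "$@"; do printf 'arg : <%s>\n' "$a"; done
# -> <one>, <two words>, <three>
```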
@@ -50,24 +68,11 @@ install_environment() {
         inst_perm_arg="-o root -g root"
     fi
     install -v $inst_perm_arg -D -t "$INSTALL_PREFIX/automation/" "$INSTALLATION_SOURCE/environment"
-    OS_RELEASE_ID="$(source /etc/os-release; echo $ID)"
     if [[ $UID -eq 0 ]]; then
-        case "$OS_RELEASE_ID" in
-            debian) ;&
-            ubuntu)
-                DEST_FILEPATH=/etc/environment
-                ;;
-            fedora)
-                DEST_FILEPATH=/etc/profile.d/zz_automation.sh
-                msg "Warning: Removing any existing, system-wide environment configuration"
-                rm -vf "$DEST_FILEPATH"
-                ;;
-            *) msg "Unknown/Unsupported OS '$OS_RELEASE_ID'"
-               exit 14
-               ;;
-        esac
-        msg "##### Making automation environment available by default, system-wide as $DEST_FILEPATH"
-        cat "$INSTALLATION_SOURCE/environment" >> "$DEST_FILEPATH"
+        # Since INSTALL_PREFIX can vary, this path must be static / hard-coded
+        # so callers always know where to find it, when installed globally (as root)
+        msg "##### Installing automation env. vars. into $INSTALL_ENV_FILEPATH"
+        cat "$INSTALLATION_SOURCE/environment" >> "$INSTALL_ENV_FILEPATH"
     fi
 }
@@ -104,7 +109,8 @@ install_automation() {
     fi
     # Allow re-installing different versions, clean out old version if found
     if [[ -d "$actual_inst_path" ]] && [[ -r "$actual_inst_path/AUTOMATION_VERSION" ]]; then
-        local installed_version=$(cat "$actual_inst_path/AUTOMATION_VERSION")
+        local installed_version
+        installed_version=$(<"$actual_inst_path/AUTOMATION_VERSION")
         msg "Warning: Removing existing installed version '$installed_version'"
         rm -rvf "$actual_inst_path"
     elif [[ -d "$actual_inst_path" ]]; then

@@ -120,8 +126,8 @@

     dbg "Configuring environment file $INSTALLATION_SOURCE/environment"
     cat <<EOF>"$INSTALLATION_SOURCE/environment"
-# Added on $(date --iso-8601=minutes) by $actual_inst_path/bin/$SCRIPT_FILENAME"
-# Any manual modifications will be lost upon upgrade or reinstall.
+# Added on $(date --utc --iso-8601=minutes) by $actual_inst_path/bin/$SCRIPT_FILENAME"
+# for version '$AUTOMATION_VERSION'. Any manual modifications will be lost upon upgrade or reinstall.
 export AUTOMATION_LIB_PATH="$actual_inst_path/lib"
 export PATH="$PATH:$actual_inst_path/bin"
 EOF
@@ -181,6 +187,7 @@ exec_installer() {
     # Full path is required so script can find and install itself
     DOWNLOADED_INSTALLER="$INSTALLATION_SOURCE/bin/$SCRIPT_FILENAME"
     if [[ -x "$DOWNLOADED_INSTALLER" ]]; then
+        fix_branch_ref "$DOWNLOADED_INSTALLER"
         msg "Executing installer version '$version_arg'\n"
         dbg "Using \$INSTALL_PREFIX '$INSTALL_PREFIX'; installer '$DOWNLOADED_INSTALLER'"
         # Execution likely trouble-free, cancel removal on exit

@@ -188,7 +195,7 @@
         # _MAGIC_JUJU set to signal actual installation work should commence
         set -x
         exec env \
-            DEBUG="$DEBUG" \
+            A_DEBUG="$A_DEBUG" \
             INSTALLATION_SOURCE="$INSTALLATION_SOURCE" \
             INSTALL_PREFIX="$INSTALL_PREFIX" \
             AUTOMATION_REPO_URL="$AUTOMATION_REPO_URL" \

@@ -196,7 +203,7 @@
             _MAGIC_JUJU="$_DEFAULT_MAGIC_JUJU" \
             /bin/bash "$DOWNLOADED_INSTALLER" "$version_arg" $_ARGS
     else
-        msg "Error: '$DOWNLOADED_INSTALLER' does not exist or is not executable" > /dev/stderr
+        msg "Error: '$DOWNLOADED_INSTALLER' does not exist or is not executable"
         # Allow exi
         exit 8
     fi

@@ -211,7 +218,7 @@ check_args() {
         msg "  Use version '$MAGIC_LOCAL_VERSION' to install from local source."
         msg "  Use version 'latest' to install from current upstream"
         exit 2
-    elif ! echo "$AUTOMATION_VERSION" | egrep -q "$arg_rx"; then
+    elif ! echo "$AUTOMATION_VERSION" | grep -E -q "$arg_rx"; then
         msg "Error: '$AUTOMATION_VERSION' does not appear to be a valid version number"
         exit 4
     elif [[ -z "$_ARGS" ]] && [[ "$_MAGIC_JUJU" == "XXXXX" ]]; then
@@ -248,12 +255,14 @@ elif [[ "$_MAGIC_JUJU" == "$_DEFAULT_MAGIC_JUJU" ]]; then
         CHAIN_TO="$INSTALLATION_SOURCE/$arg/.install.sh"
         if [[ -r "$CHAIN_TO" ]]; then
             # Cannot assume common was installed system-wide
+            # AUTOMATION_LIB_PATH defined by anchors.sh
+            # shellcheck disable=SC2154
             env AUTOMATION_LIB_PATH=$AUTOMATION_LIB_PATH \
                 AUTOMATION_VERSION=$AUTOMATION_VERSION \
                 INSTALLATION_SOURCE=$INSTALLATION_SOURCE \
-                DEBUG=$DEBUG \
+                A_DEBUG=$A_DEBUG \
                 MAGIC_JUJU=$_MAGIC_JUJU \
-                /bin/bash $CHAIN_TO
+                $CHAIN_TO
             msg "##### Installation complete for '$arg' subcomponent"
         else
             msg "Warning: Cannot find installer for $CHAIN_TO"

@@ -267,7 +276,7 @@ elif [[ "$_MAGIC_JUJU" == "$_DEFAULT_MAGIC_JUJU" ]]; then
         echo -n "##### Finalizing successful installation of version "
         echo -n "$AUTOMATION_VERSION" | tee "$AUTOMATION_LIB_PATH/../AUTOMATION_VERSION"
         echo " of 'common'${_ARGS:+, and subcomponents: $_ARGS}"
-    ) > /dev/stderr
+    )
 else  # Something has gone horribly wrong
     msg "Error: The installer script is incompatible with version $AUTOMATION_VERSION"
     msg "Please obtain and use a newer version of $SCRIPT_FILENAME which supports ID $_MAGIC_JUJU"
@@ -20,10 +20,10 @@ runner_script_filename="$(basename $0)"
 for test_subdir in $(find "$(realpath $(dirname $0)/../)" -type d -name test | sort -r); do
     test_runner_filepath="$test_subdir/$runner_script_filename"
     if [[ -x "$test_runner_filepath" ]] && [[ "$test_runner_filepath" != "$this_script_filepath" ]]; then
-        echo -e "\nExecuting $test_runner_filepath..." > /dev/stderr
+        echo -e "\nExecuting $test_runner_filepath..." >> /dev/stderr
         $test_runner_filepath
     else
-        echo -e "\nWARNING: Skipping $test_runner_filepath" > /dev/stderr
+        echo -e "\nWARNING: Skipping $test_runner_filepath" >> /dev/stderr
     fi
 done
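The `>` to `>>` change above is subtler than it looks: when stderr points at a regular file (common when CI captures logs), `> /dev/stderr` re-opens that file with truncation and clobbers anything written earlier, while `>> /dev/stderr` appends. A minimal sketch demonstrating the difference:

```bash
#!/bin/bash
log=$(mktemp)

# Truncating form: each write re-opens the log and wipes prior output.
{ echo "one" > /dev/stderr;  echo "two" > /dev/stderr;  } 2> "$log"
cat "$log"   # -> two          ("one" was truncated away)

# Appending form: both lines survive.
{ echo "one" >> /dev/stderr; echo "two" >> /dev/stderr; } 2> "$log"
cat "$log"   # -> one, two
```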
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+# Installs 'build-push' script system-wide. NOT intended to be used directly
+# by humans, should only be used indirectly by running
+# ../bin/install_automation.sh <ver> build-push
+
+set -eo pipefail
+
+source "$AUTOMATION_LIB_PATH/anchors.sh"
+source "$AUTOMATION_LIB_PATH/console_output.sh"
+
+INSTALL_PREFIX=$(realpath $AUTOMATION_LIB_PATH/..)
+# Assume the directory this script is in, represents what is being installed
+INSTALL_NAME=$(basename $(dirname ${BASH_SOURCE[0]}))
+AUTOMATION_VERSION=$(automation_version)
+[[ -n "$AUTOMATION_VERSION" ]] || \
+    die "Could not determine version of common automation libs, was 'install_automation.sh' successful?"
+
+echo "Installing $INSTALL_NAME version $(automation_version) into $INSTALL_PREFIX"
+
+unset INST_PERM_ARG
+if [[ $UID -eq 0 ]]; then
+    INST_PERM_ARG="-o root -g root"
+fi
+
+cd $(dirname $(realpath "${BASH_SOURCE[0]}"))
+install -v $INST_PERM_ARG -D -t "$INSTALL_PREFIX/bin" ./bin/*
+
+echo "Successfully installed $INSTALL_NAME"
@@ -0,0 +1,114 @@
# Build-push script

This is a wrapper around buildah build, coupled with pre- and post-build
commands and automatic registry server push. Its goal is to
provide an abstraction layer for additional build automation. Though
it may be useful on its own, this is not its primary purpose.


## Requirements

* Executables for `jq` and `buildah` (1.23 or later) are available.
* The automation common-library is installed & its env. var. set:
  * Installed system-wide as per
    [the top-level documentation](https://github.com/containers/automation#installation)
  * -or-
  * Run directly from a repository clone by first doing
    `export AUTOMATION_LIB_PATH=/path/to/clone/common/lib`
* Optionally, the kernel may be configured to use emulation (such as QEMU)
  for non-native binary execution (where available and supported). See
  [the section below for more
  information](README.md#qemu-user-static-emulation).


## QEMU-user-static Emulation

On platforms/distros that support it (like F34+) this is a handy
way to enable non-native binary execution. It can therefore be
used to build container images for other, non-native architectures.
Though setup may vary by distro/version, in F34 all that's needed
is to install the `qemu-user-static` package. It will take care
of automatically registering the emulation executables with the
kernel.

Otherwise, you may find these [handy/dandy scripts and
container images useful](https://github.com/multiarch/qemu-user-static#multiarchqemu-user-static-images) for environments without native support (like
CentOS and RHEL). However, be aware I cannot attest to the safety
or quality of those binaries/images, so use them at your own risk.
Something like this (as **root**):

```bash
~# install qemu user static binaries somehow
~# qemu_setup_fqin="docker.io/multiarch/qemu-user-static:latest"
~# vol_awk='{print "-v "$1":"$1""}'
~# bin_vols=$(find /usr/bin -name 'qemu-*-static' | awk -e "$vol_awk" | tr '\n' ' ')
~# podman run --rm --privileged $bin_vols $qemu_setup_fqin --reset -p yes
```

Note: You may need to alter `$vol_awk` or the `podman` command line
depending on what your platform supports.
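
To sanity-check that the emulators really are registered with the
kernel, you can run a non-native image and examine the reported machine
type. This check is not part of this repository's scripts; it assumes
`podman` is installed and an `aarch64` emulator was registered:

```bash
# On an (e.g.) x86_64 host, emulation should report the foreign arch.
$ podman run --rm --arch=arm64 registry.fedoraproject.org/fedora-minimal:latest uname -m
aarch64
```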

## Use in build automation

This script may be useful as a uniform interface for building and pushing
for multiple architectures, all in one go. A simple example would be:

```bash
$ export SOME_USERNAME=foo  # normally hidden/secured in the CI system
$ export SOME_PASSWORD=bar  # along with this password value.

$ build-push.sh --arches=arm64,ppc64le,s390x quay.io/some/thing ./path/to/contextdir
```

In this case, the image `quay.io/some/thing:latest` would be built for the
listed architectures, then pushed to the remote registry server.
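
Afterward, you can verify every requested architecture actually landed in
the pushed manifest list. This is not something `build-push.sh` does for
you on the registry side; a quick manual check, assuming `skopeo` and
`jq` are installed:

```bash
$ skopeo inspect --raw docker://quay.io/some/thing:latest | \
      jq -r '.manifests[].platform.architecture'
arm64
ppc64le
s390x
```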

### Use in automation with additional preparation

When building for multiple architectures using emulation, it's vastly
more efficient to execute as few non-native RUN instructions as possible.
This is supported by the `--prepcmd` option, which specifies a shell
command-string to execute prior to building the image. The command-string
will have access to a set of exported env. vars. for use and/or
substitution (see the `--help` output for details).

For example, this command string could be used to seed the build cache
by pulling down a previously-built image of the same name:

```bash
$ build-push.sh ... quay.io/test/ing --prepcmd='$RUNTIME pull $FQIN:latest'
```

In this example, the command `buildah pull quay.io/test/ing:latest` will
be executed prior to the build.

### Use in automation with modified images

Sometimes additional steps need to be performed after the build, to modify,
inspect or additionally tag the built image before it's pushed. This could
include (for example) running tests on the image, or modifying its metadata
in some way. All these and more are supported by the `--modcmd` option.

Simply feed it a command string to be run after a successful build. The
command-string script will have access to a set of exported env. vars.
for use and/or substitution (see the `--help` output for details).

After executing a `--modcmd`, `build-push.sh` will take care to identify
all images related to the original FQIN (minus the tag). Should
additional tags be present, they will also be pushed (absent the
`--nopush` flag). If any/all images are missing, they will be silently
ignored.

For example, you could use this to only push version-tagged images, and
never `latest`:

```
$ build-push.sh ... --modcmd='$RUNTIME tag $FQIN:latest $FQIN:9.8.7 && \
    $RUNTIME manifest rm $FQIN:latest'
```

Note: If your `--modcmd` command or script removes **ALL** tags, and
`--nopush` was **not** specified, an error message will be printed
followed by a non-zero exit. This is intended to help automation
catch an assumed missed-expectation.
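
For example, a CI wrapper might treat that non-zero exit as fatal. A
minimal sketch (the FQIN and options mirror the examples above; the log
message is purely illustrative):

```bash
if ! build-push.sh --modcmd="$MODCMD" quay.io/some/thing ./path/to/contextdir; then
    echo "build-push.sh failed; perhaps --modcmd removed every tag?" >&2
    exit 1
fi
```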

@@ -0,0 +1,481 @@
#!/bin/bash

# This is a wrapper around buildah build, coupled with pre and post
# build commands and automatic registry server push.  Its goal is to
# provide an abstraction layer for additional build automation.  Though
# it may be useful on its own, this is not its primary purpose.
#
# See the README.md file for more details

set -eo pipefail

# This is a convenience for callers that don't separately source this first
# in their automation setup.
if [[ -z "$AUTOMATION_LIB_PATH" ]] && [[ -r /etc/automation_environment ]]; then
    set -a
    source /etc/automation_environment
    set +a
fi

if [[ ! -r "$AUTOMATION_LIB_PATH/common_lib.sh" ]]; then
    (
    echo "ERROR: Expecting \$AUTOMATION_LIB_PATH to contain the installation"
    echo "       directory path for the common automation tooling."
    echo "       Please refer to the README.md for installation instructions."
    ) >> /dev/stderr
    exit 2  # Verified by tests
fi

source $AUTOMATION_LIB_PATH/common_lib.sh

SCRIPT_FILEPATH=$(realpath "${BASH_SOURCE[0]}")

# Useful for non-standard installations & testing
RUNTIME="${RUNTIME:-$(type -P buildah||echo /bin/true)}"  # see check_dependencies()

# List of variable names to export for --prepcmd and --modcmd
# N/B: Bash cannot export arrays
_CMD_ENV="SCRIPT_FILEPATH RUNTIME PLATFORMOS FQIN CONTEXT
          PUSH ARCHES REGSERVER NAMESPACE IMGNAME PREPCMD MODCMD"

# Simple error-message strings
E_FQIN="Must specify a valid 3-component FQIN w/o a tag, not:"
E_CONTEXT="Given context path is not an existing directory:"
E_ONEARCH="Must specify --arches=<value> with '=', and <value> being a comma-separated list, not:"
_E_PREPMOD_SFX="with '=', and <value> being a (quoted) string, not:"
E_USERPASS="When --nopush not specified, must export non-empty value for variable:"
E_USAGE="
Usage: $(basename ${BASH_SOURCE[0]}) [options] <FQIN> <Context> [extra...]

With the required arguments (See also, 'Required Environment Variables'):

<FQIN> is the fully-qualified image name to build and push.  It must
contain only three components: Registry FQDN:PORT, Namespace, and
Image Name.  The image tag must NOT be specified, see --modcmd=<value>
option below.

<Context> is the full build-context DIRECTORY path containing the
target Dockerfile or Containerfile.  This must be a local path to
an existing directory.

Zero or more [options] and [extra...] optional arguments:

--help if specified, will display this usage/help message.

--arches=<value> specifies a comma-separated list of architectures
to build.  When unspecified, the local system's architecture will
be used.  Architecture names must be the canonical values used/supported
by golang and available/included in the base-image's manifest list.
Note: The '=' is required.

--prepcmd=<value> specifies a bash string to execute just prior to
building.  Any embedded quoting will be preserved.  Any output produced
will be displayed, but ignored.  See the 'Environment for...' section
below for details on what env. vars. are made available for use
by/substituted in <value>.

--modcmd=<value> specifies a bash string to execute after a successful
build but prior to pushing any image(s).  Any embedded quoting will be
preserved.  Output from the script will be displayed, but ignored.
Any tags which should/shouldn't be pushed must be handled by this
command/script (including complete removal or replacement).  See the
'Environment for...' section below for details on what env. vars.
are made available for use by/substituted in <value>.  If no
FQIN tags remain, an error will be printed and the script will exit
non-zero.

--nopush will bypass pushing the built/tagged image(s).

[extra...] specifies optional, additional arguments to pass when building
images.  For example, this may be used to pass in [actual] build-args, or
volume-mounts.

Environment for --prepcmd and --modcmd

The shell environment for executing these strings will contain the
following environment variables and their values at runtime:

$_CMD_ENV

Additionally, unless --nopush was specified, the host will be logged
into the registry server.

Required Environment Variables

Unless --nopush is used, \$<NAMESPACE>_USERNAME and
\$<NAMESPACE>_PASSWORD must contain the necessary registry
credentials.  The value for <NAMESPACE> is always capitalized.
The account is assumed to have 'write' access to push the built
image.

Optional Environment Variables:

\$RUNTIME specifies the complete path to an alternate executable
to use for building.  Defaults to the location of 'buildah'.

\$PARALLEL_JOBS specifies the number of builds to execute in parallel.
When unspecified, it defaults to the number of processor threads on
the system.
"

# Show an error message, followed by usage text to stderr
die_help() {
    local err="${1:-No error message specified}"
    msg "Please use --help for usage information."
    die "$err"
}

init() {
    # /bin/true is used by unit-tests
    if [[ "$RUNTIME" =~ true ]] || [[ ! $(type -P "$RUNTIME") ]]; then
        die_help "Unable to find \$RUNTIME ($RUNTIME) on path: $PATH"
    fi
    if [[ -n "$PARALLEL_JOBS" ]] && [[ ! "$PARALLEL_JOBS" =~ ^[0-9]+$ ]]; then
        PARALLEL_JOBS=""
    fi
    # Can't use $(uname -m) because (for example) "x86_64" != "amd64" in registries
    # This will be verified, see check_dependencies().
    NATIVE_GOARCH="${NATIVE_GOARCH:-$($RUNTIME info --format='{{.host.arch}}')}"
    PARALLEL_JOBS="${PARALLEL_JOBS:-$($RUNTIME info --format='{{.host.cpus}}')}"

    dbg "Found native go-arch: $NATIVE_GOARCH"
    dbg "Found local CPU count: $PARALLEL_JOBS"

    if [[ -z "$NATIVE_GOARCH" ]]; then
        die_help "Unable to determine the local system architecture, is \$RUNTIME correct: '$RUNTIME'"
    elif ! type -P jq &>/dev/null; then
        die_help "Unable to find 'jq' executable on path: $PATH"
    fi

    # Not likely overridden, but keep the possibility open
    PLATFORMOS="${PLATFORMOS:-linux}"

    # Env. vars set by parse_args()
    FQIN=""                  # required (fully-qualified-image-name)
    CONTEXT=""               # required (directory path)
    PUSH=1                   # optional (1 means push, 0 means do not)
    ARCHES="$NATIVE_GOARCH"  # optional (Native architecture default)
    PREPCMD=""               # optional (--prepcmd)
    MODCMD=""                # optional (--modcmd)
    declare -a BUILD_ARGS
    BUILD_ARGS=()            # optional
    REGSERVER=""             # parsed out of $FQIN
    NAMESPACE=""             # parsed out of $FQIN
    IMGNAME=""               # parsed out of $FQIN
    LOGGEDIN=0               # indicates successful $REGSERVER/$NAMESPACE login
    unset NAMESPACE_USERNAME # lookup based on $NAMESPACE when $PUSH=1
    unset NAMESPACE_PASSWORD # lookup based on $NAMESPACE when $PUSH=1
}

cleanup() {
    set +e
    if ((LOGGEDIN)) && ! $RUNTIME logout "$REGSERVER/$NAMESPACE"; then
        warn "Logout of registry '$REGSERVER/$NAMESPACE' failed."
    fi
}

parse_args() {
    local -a args
    local arg
    local archarg
    local nsu_var
    local nsp_var

    dbg "in parse_args()"

    if [[ $# -lt 2 ]]; then
        die_help "Must specify non-empty values for required arguments."
    fi

    args=("$@")  # Special-case quoting: Will NOT separate quoted arguments
    for arg in "${args[@]}"; do
        dbg "Processing parameter '$arg'"
        case "$arg" in
            --arches=*)
                archarg=$(tr ',' ' '<<<"${arg:9}")
                if [[ -z "$archarg" ]]; then die_help "$E_ONEARCH '$arg'"; fi
                ARCHES="$archarg"
                ;;
            --arches)
                # Argument format not supported (to simplify parsing logic)
                die_help "$E_ONEARCH '$arg'"
                ;;
            --prepcmd=*)
                # Bash argument processing automatically strips any outside quotes
                PREPCMD="${arg:10}"
                ;;
            --prepcmd)
                die_help "Must specify --prepcmd=<value> $_E_PREPMOD_SFX '$arg'"
                ;;
            --modcmd=*)
                MODCMD="${arg:9}"
                ;;
            --modcmd)
                die_help "Must specify --modcmd=<value> $_E_PREPMOD_SFX '$arg'"
                ;;
            --nopush)
                dbg "Nopush flag detected, will NOT push built images."
                PUSH=0
                ;;
            *)
                if [[ -z "$FQIN" ]]; then
                    dbg "Grabbing FQIN parameter: '$arg'."
                    FQIN="$arg"
                    REGSERVER=$(awk -F '/' '{print $1}' <<<"$FQIN")
                    NAMESPACE=$(awk -F '/' '{print $2}' <<<"$FQIN")
                    IMGNAME=$(awk -F '/' '{print $3}' <<<"$FQIN")
                elif [[ -z "$CONTEXT" ]]; then
                    dbg "Grabbing Context parameter: '$arg'."
                    CONTEXT=$(realpath -e -P $arg || die_help "$E_CONTEXT '$arg'")
                else
                    # Hack: Allow array addition to handle any embedded special characters
                    # shellcheck disable=SC2207
                    BUILD_ARGS+=($(printf "%q" "$arg"))
                fi
                ;;
        esac
    done
    if ((PUSH)) && [[ -n "$NAMESPACE" ]]; then
        set +x  # Don't expose any secrets if somehow we got into -x mode
        nsu_var="$(tr '[:lower:]' '[:upper:]'<<<${NAMESPACE})_USERNAME"
        nsp_var="$(tr '[:lower:]' '[:upper:]'<<<${NAMESPACE})_PASSWORD"
        dbg "Confirming non-empty \$$nsu_var and \$$nsp_var"
        # These will be unset after logging into the registry
        NAMESPACE_USERNAME="${!nsu_var}"
        NAMESPACE_PASSWORD="${!nsp_var}"
        # Leak as little as possible into any child processes
        unset "$nsu_var" "$nsp_var"
    fi

    # validate parsed argument contents
    if [[ -z "$FQIN" ]]; then
        die_help "$E_FQIN '<empty>'"
    elif [[ -z "$REGSERVER" ]] || [[ -z "$NAMESPACE" ]] || [[ -z "$IMGNAME" ]]; then
        die_help "$E_FQIN '$FQIN'"
    elif [[ -z "$CONTEXT" ]]; then
        die_help "$E_CONTEXT ''"
    fi
    test $(tr -d -c '/' <<<"$FQIN" | wc -c) = '2' || \
        die_help "$E_FQIN '$FQIN'"
    test -r "$CONTEXT/Containerfile" || \
        test -r "$CONTEXT/Dockerfile" || \
        die_help "Given context path does not contain a Containerfile or Dockerfile: '$CONTEXT'"

    if ((PUSH)); then
        test -n "$NAMESPACE_USERNAME" || \
            die_help "$E_USERPASS '\$$nsu_var'"
        test -n "$NAMESPACE_PASSWORD" || \
            die_help "$E_USERPASS '\$$nsp_var'"
    fi

    dbg "Processed:
    RUNTIME='$RUNTIME'
    FQIN='$FQIN'
    CONTEXT='$CONTEXT'
    PUSH='$PUSH'
    ARCHES='$ARCHES'
    MODCMD='$MODCMD'
    BUILD_ARGS=$(echo -n "${BUILD_ARGS[@]}")
    REGSERVER='$REGSERVER'
    NAMESPACE='$NAMESPACE'
    IMGNAME='$IMGNAME'
    namespace username chars: '${#NAMESPACE_USERNAME}'
    namespace password chars: '${#NAMESPACE_PASSWORD}'
"
}

# Build may have a LOT of output, use a standard stage-marker
# to ease reading and debugging from the wall-o-text
stage_notice() {
    local msg
    # N/B: It would be nice/helpful to resolve any env. vars. in '$@'
    #      for display.  Unfortunately this is hard to do safely
    #      with (e.g.) eval echo "$@" :(
    msg="$*"
    (
    echo "############################################################"
    echo "$msg"
    echo "############################################################"
    ) >> /dev/stderr
}

BUILTIID=""  # populated with the image-id on successful build
parallel_build() {
    local arch
    local platforms=""
    local output
    local _fqin
    local -a _args

    _fqin="$1"
    dbg "in parallel_build($_fqin)"
    req_env_vars FQIN ARCHES CONTEXT REGSERVER NAMESPACE IMGNAME
    req_env_vars PARALLEL_JOBS PLATFORMOS RUNTIME _fqin

    for arch in $ARCHES; do
        platforms="${platforms:+$platforms,}$PLATFORMOS/$arch"
    done

    # Need to build up the command from parts b/c array conversion is handled
    # in strange and non-obvious ways when it comes to embedded whitespace.
    _args=(--layers --force-rm --jobs="$PARALLEL_JOBS" --platform="$platforms"
           --manifest="$_fqin" "$CONTEXT")

    # Keep user-specified BUILD_ARGS near the beginning so errors are easy to spot
    # Provide a copy of the output in case something goes wrong in a complex build
    stage_notice "Executing build command: '$RUNTIME build ${BUILD_ARGS[*]} ${_args[*]}'"
    "$RUNTIME" build "${BUILD_ARGS[@]}" "${_args[@]}"
}

confirm_arches() {
    local inspjson
    local filter=".manifests[].platform.architecture"
    local arch
    local maniarches

    dbg "in confirm_arches()"
    req_env_vars FQIN ARCHES RUNTIME
    if ! inspjson=$($RUNTIME manifest inspect "containers-storage:$FQIN:latest"); then
        die "Error reading manifest list metadata for 'containers-storage:$FQIN:latest'"
    fi

    # Convert into space-delimited string for grep error message (below)
    # TODO: Use an array instead, could be simpler?  Would need testing.
    if ! maniarches=$(jq -r "$filter" <<<"$inspjson" | \
                      grep -v 'null' | \
                      tr -s '[:space:]' ' ' | \
                      sed -z '$ s/[\n ]$//'); then
        die "Error processing manifest list metadata:
$inspjson"
    fi
    dbg "Found manifest arches: $maniarches"

    for arch in $ARCHES; do
        grep -q "$arch" <<<"$maniarches" || \
            die "Failed to locate the $arch arch. in the $FQIN:latest manifest-list: $maniarches"
    done
}

registry_login() {
    dbg "in registry_login()"
    req_env_vars PUSH LOGGEDIN

    if ((PUSH)) && ! ((LOGGEDIN)); then
        req_env_vars NAMESPACE_USERNAME NAMESPACE_PASSWORD REGSERVER NAMESPACE
        dbg "    Logging in"
        echo "$NAMESPACE_PASSWORD" | \
            $RUNTIME login --username "$NAMESPACE_USERNAME" --password-stdin \
            "$REGSERVER/$NAMESPACE"
        LOGGEDIN=1
    elif ((PUSH)); then
        dbg "    Already logged in"
    fi

    # No reason to keep these around any longer
    unset NAMESPACE_USERNAME NAMESPACE_PASSWORD
}

run_prepmod_cmd() {
    local kind="$1"
    shift
    dbg "Exporting variables '$_CMD_ENV'"
    # The indirect export is intentional here
    # shellcheck disable=SC2163
    export $_CMD_ENV
    stage_notice "Executing $kind-command: " "$@"
    bash -c "$@"
    dbg "$kind command successful"
}

# Outputs sorted list of FQIN w/ tags to stdout, silent otherwise
get_manifest_tags() {
    local result_json
    local fqin_names
    dbg "in get_manifest_fqins()"

    # At the time of this comment, there is no reliable way to
    # lookup all tags based solely on inspecting a manifest.
    # However, since we know $FQIN (remember, value has no tag) we can
    # use it to search all related names in container storage.  Unfortunately
    # because images can have multiple tags, the `reference` filter
    # can return names we don't care about.  Work around this with a
    # grep of $FQIN in the results.
    if ! result_json=$($RUNTIME images --json --filter=reference=$FQIN); then
        die "Error listing manifest-list images that reference '$FQIN'"
    fi

    dbg "Image listing json: $result_json"
    if [[ -n "$result_json" ]]; then  # N/B: value could be '[]'
        # Rely on the caller to handle an empty list, ignore items missing a name key.
        if ! fqin_names=$(jq -r '.[]? | .names[]?'<<<"$result_json"); then
            die "Error obtaining image names from '$FQIN' manifest-list search result:
$result_json"
        fi

        dbg "Sorting fqin_names"
        # Don't emit an empty newline when the list is empty
        [[ -z "$fqin_names" ]] || \
            sort <<<"$fqin_names"
    fi
    dbg "get_manifest_tags() returning successfully"
}

push_images() {
    local fqin_list
    local fqin
    dbg "in push_images()"

    # It's possible that --modcmd=* removed all images, make sure
    # this is known to the caller.
    if ! fqin_list=$(get_manifest_tags); then
        die "Retrieving set of manifest-list tags to push for '$FQIN'"
    fi
    if [[ -z "$fqin_list" ]]; then
        warn "No FQIN(s) to be pushed."
    fi

    if ((PUSH)); then
        dbg "Will try to push FQINs: '$fqin_list'"

        registry_login
        for fqin in $fqin_list; do
            # Note: --all means push manifest AND images it references
            msg "Pushing $fqin"
            $RUNTIME manifest push --all $fqin docker://$fqin
        done
    else
        # Even if --nopush was specified, be helpful to humans with a lookup of all the
        # relevant tags for $FQIN that would have been pushed and display them.
        warn "Option --nopush specified, not pushing: '$fqin_list'"
    fi
}

##### MAIN() #####

# Handle requested help first before anything else
if grep -q -- '--help' <<<"$@"; then
    echo "$E_USAGE" >> /dev/stdout  # allow grep'ing
    exit 0
fi

init
parse_args "$@"
if [[ -n "$PREPCMD" ]]; then
    registry_login
    run_prepmod_cmd prep "$PREPCMD"
fi

parallel_build "$FQIN:latest"

# If a parallel build or the manifest-list assembly fails, buildah
# may still exit successfully.  Catch this condition by verifying
# all expected arches are present in the manifest list.
confirm_arches

if [[ -n "$MODCMD" ]]; then
    registry_login
    run_prepmod_cmd mod "$MODCMD"
fi

# Handles --nopush internally
push_images
@@ -0,0 +1,43 @@
#!/bin/bash

set -e

# Need to keep track of values from 'build' to 'manifest' calls
DATF='/tmp/fake_buildah.json'

if [[ "$1" == "build" ]]; then
    echo '{"manifests":[' > $DATF
    for arg; do
        if [[ "$arg" =~ --platform= ]]; then
            for platarch in $(cut -d '=' -f 2 <<<"$arg" | tr ',' ' '); do
                arch=$(cut -d '/' -f 2 <<<"$platarch")
                [[ -n "$arch" ]] || continue
                echo "FAKEBUILDAH ($arch)" > /dev/stderr
                echo -n '  {"platform":{"architecture":"' >> $DATF
                echo -n "$arch" >> $DATF
                echo '"}},' >> $DATF
            done
        fi
    done
    # dummy-value to avoid dealing with JSON oddity: last item must not
    # end with a comma
    echo '  {}' >> $DATF
    echo ']}' >> $DATF

    # Tests expect to see this
    echo "FAKEBUILDAH $@"
elif [[ "$1" == "manifest" ]]; then
    # validate json while outputting it
    jq . $DATF
elif [[ "$1" == "info" ]]; then
    case "$@" in
        *arch*) echo "amd64" ;;
        *cpus*) echo "2" ;;
        *) exit 1 ;;
    esac
elif [[ "$1" == "images" ]]; then
    echo '[{"names":["localhost/foo/bar:latest"]}]'
else
    echo "ERROR: Unexpected arg '$1' to fake_buildah.sh" >> /dev/stderr
    exit 9
fi
@@ -0,0 +1,24 @@

# This script is intended for use by tests, DO NOT EXECUTE.

set -eo pipefail

# shellcheck disable=SC2154
if [[ "$CIRRUS_CI" == "true" ]]; then
    # Cirrus-CI is setup (see .cirrus.yml) to run tests on CentOS
    # for simplicity, but it has no native qemu-user-static.  For
    # the benefit of CI testing, cheat and use whatever random
    # emulators are included in the container image.

    # N/B: THIS IS NOT SAFE FOR PRODUCTION USE!!!!!
    podman run --rm --privileged \
        mirror.gcr.io/multiarch/qemu-user-static:latest \
        --reset -p yes
elif [[ -x "/usr/bin/qemu-aarch64-static" ]]; then
    # TODO: Better way to determine if kernel already setup?
    echo "Warning: Assuming qemu-user-static is already setup"
else
    echo "Error: System does not appear to have qemu-user-static setup"
    exit 1
fi
@@ -0,0 +1,4 @@
FROM registry.fedoraproject.org/fedora-minimal:latest
RUN /bin/true
ENTRYPOINT /bin/false
# WARNING: testbuilds.sh depends on the number of build steps
@@ -0,0 +1,103 @@
#!/bin/bash

TEST_SOURCE_DIRPATH=$(realpath $(dirname "${BASH_SOURCE[0]}"))

# Load standardized test harness
source $TEST_SOURCE_DIRPATH/testlib.sh || exit 1

SUBJ_FILEPATH="$TEST_DIR/$SUBJ_FILENAME"
TEST_CONTEXT="$TEST_SOURCE_DIRPATH/test_context"
EMPTY_CONTEXT=$(mktemp -d -p '' .tmp_$(basename ${BASH_SOURCE[0]})_XXXX)
export NATIVE_GOARCH=$(buildah info --format='{{.host.arch}}')

test_cmd "Verify error when automation library not found" \
    2 'ERROR: Expecting \$AUTOMATION_LIB_PATH' \
    bash -c "AUTOMATION_LIB_PATH='' RUNTIME=/bin/true $SUBJ_FILEPATH 2>&1"

export AUTOMATION_LIB_PATH="$TEST_SOURCE_DIRPATH/../../common/lib"

test_cmd "Verify error when buildah can't be found" \
    1 "ERROR: Unable to find.+/usr/local/bin" \
    bash -c "RUNTIME=/bin/true $SUBJ_FILEPATH 2>&1"

# These tests don't actually need to build/run anything
export RUNTIME="$TEST_SOURCE_DIRPATH/fake_buildah.sh"

test_cmd "Verify error when executed w/o any arguments" \
    1 "ERROR: Must.+required arguments." \
    bash -c "$SUBJ_FILEPATH 2>&1"

test_cmd "Verify error when specifying partial required arguments" \
    1 "ERROR: Must.+required arguments." \
    bash -c "$SUBJ_FILEPATH foo 2>&1"

test_cmd "Verify error when given a bad Containerfile directory" \
    1 "ERROR:.+directory: 'bar'" \
    bash -c "$SUBJ_FILEPATH foo bar 2>&1"

test_cmd "Verify error when specifying an invalid FQIN" \
    1 "ERROR:.+FQIN.+foo" \
    bash -c "$SUBJ_FILEPATH foo $EMPTY_CONTEXT 2>&1"

test_cmd "Verify error when specifying a slightly invalid FQIN" \
    1 "ERROR:.+FQIN.+foo/bar" \
    bash -c "$SUBJ_FILEPATH foo/bar $EMPTY_CONTEXT 2>&1"

test_cmd "Verify error when given a bad context subdirectory" \
    1 "ERROR:.+Containerfile or Dockerfile: '$EMPTY_CONTEXT'" \
    bash -c "$SUBJ_FILEPATH foo/bar/baz $EMPTY_CONTEXT 2>&1"

# No longer needed
rm -rf "$EMPTY_CONTEXT"
unset EMPTY_CONTEXT

test_cmd "Verify --help output to stdout can be grepped" \
    0 "Optional Environment Variables:" \
    bash -c "$SUBJ_FILEPATH --help | grep 'Optional Environment Variables:'"

test_cmd "Confirm required username env. var. unset error" \
    1 "ERROR.+BAR_USERNAME" \
    bash -c "$SUBJ_FILEPATH foo/bar/baz $TEST_CONTEXT 2>&1"

test_cmd "Confirm required password env. var. unset error" \
    1 "ERROR.+BAR_PASSWORD" \
    bash -c "BAR_USERNAME=snafu $SUBJ_FILEPATH foo/bar/baz $TEST_CONTEXT 2>&1"

for arg in 'prepcmd' 'modcmd'; do
    test_cmd "Verify error when --$arg specified without an '='" \
        1 "ERROR:.+with '='" \
        bash -c "BAR_USERNAME=snafu BAR_PASSWORD=ufans $SUBJ_FILEPATH foo/bar/baz $TEST_CONTEXT --$arg notgoingtowork 2>&1"
done

test_cmd "Verify numeric \$PARALLEL_JOBS is handled properly" \
    0 "FAKEBUILDAH.+--jobs=42 " \
    bash -c "PARALLEL_JOBS=42 $SUBJ_FILEPATH localhost/foo/bar --nopush $TEST_CONTEXT 2>&1"

test_cmd "Verify non-numeric \$PARALLEL_JOBS is handled properly" \
    0 "FAKEBUILDAH.+--jobs=[0-9]+ " \
    bash -c "PARALLEL_JOBS=badvalue $SUBJ_FILEPATH localhost/foo/bar --nopush $TEST_CONTEXT 2>&1"

PREPCMD='echo "#####${ARCHES}#####"'
test_cmd "Verify \$ARCHES value is available to prep-command" \
    0 "#####correct horse battery staple#####.+FAKEBUILDAH.+test_context" \
    bash -c "$SUBJ_FILEPATH --arches=correct,horse,battery,staple localhost/foo/bar --nopush --prepcmd='$PREPCMD' $TEST_CONTEXT 2>&1"

rx="FAKEBUILDAH build \\$'--test-build-arg=one \\\"two\\\" three\\\nfour' --anotherone=foo\\\ bar"
test_cmd "Verify special characters preserved in build-args" \
    0 "$rx" \
    bash -c "PARALLEL_JOBS=badvalue $SUBJ_FILEPATH localhost/foo/bar $TEST_CONTEXT --test-build-arg=\"one \\\"two\\\" three
four\" --nopush --anotherone=\"foo bar\" 2>&1"

# A specialized non-container environment required to run these
if [[ -n "$BUILD_PUSH_TEST_BUILDS" ]]; then
    export RUNTIME=$(type -P buildah)
    export PARALLEL_JOBS=$($RUNTIME info --format='{{.host.cpus}}')

    source $(dirname "${BASH_SOURCE[0]}")/testbuilds.sh
else
    echo "WARNING: Set \$BUILD_PUSH_TEST_BUILDS non-empty to fully test build_push."
    echo ""
fi

# Must always happen last
exit_with_status
@@ -0,0 +1,146 @@

# This script is intended to be sourced from testbin-build-push.sh.
# Any/all other usage is virtually guaranteed to fail and/or cause
# harm to the system.

for varname in RUNTIME SUBJ_FILEPATH TEST_CONTEXT TEST_SOURCE_DIRPATH TEST_FQIN BUILDAH_USERNAME BUILDAH_PASSWORD; do
    value=${!varname}
    if [[ -z "$value" ]]; then
        echo "ERROR: Required \$$varname variable is unset/empty."
        exit 1
    fi
done
unset value

# RUNTIME is defined by caller
# shellcheck disable=SC2154
$RUNTIME --version
test_cmd "Confirm $(basename $RUNTIME) is available" \
    0 "buildah version .+" \
    $RUNTIME --version

skopeo --version
test_cmd "Confirm skopeo is available" \
    0 "skopeo version .+" \
    skopeo --version

PREPCMD='echo "SpecialErrorMessage:$REGSERVER" >> /dev/stderr && exit 42'
# SUBJ_FILEPATH and TEST_CONTEXT are defined by caller
# shellcheck disable=SC2154
test_cmd "Confirm error output and exit(42) from --prepcmd" \
    42 "SpecialErrorMessage:localhost" \
    bash -c "$SUBJ_FILEPATH --nopush localhost/foo/bar $TEST_CONTEXT --prepcmd='$PREPCMD' 2>&1"

# N/B: The following are stateful - each depends on preceding test success
# and assumes empty container-storage (podman system reset).

test_cmd "Confirm building native-arch test image w/ --nopush" \
    0 "STEP 3/3: ENTRYPOINT /bin/false.+COMMIT" \
    bash -c "A_DEBUG=1 $SUBJ_FILEPATH localhost/foo/bar $TEST_CONTEXT --nopush 2>&1"

native_arch=$($RUNTIME info --format='{{.host.arch}}')
test_cmd "Confirm native_arch was set to non-empty string" \
    0 "" \
    test -n "$native_arch"

test_cmd "Confirm built image manifest contains the native arch '$native_arch'" \
    0 "$native_arch" \
    bash -c "$RUNTIME manifest inspect localhost/foo/bar:latest | jq -r '.manifests[0].platform.architecture'"

test_cmd "Confirm rebuilding with same command uses cache" \
    0 "STEP 3/3.+Using cache" \
    bash -c "A_DEBUG=1 $SUBJ_FILEPATH localhost/foo/bar $TEST_CONTEXT --nopush 2>&1"

test_cmd "Confirm manifest-list can be removed by name" \
    0 "untagged: localhost/foo/bar:latest" \
    $RUNTIME manifest rm containers-storage:localhost/foo/bar:latest

test_cmd "Verify expected partial failure when passing bogus architectures" \
    125 "no image found in image index for architecture" \
    bash -c "A_DEBUG=1 $SUBJ_FILEPATH --arches=correct,horse,battery,staple localhost/foo/bar --nopush $TEST_CONTEXT 2>&1"

MODCMD='$RUNTIME tag $FQIN:latest $FQIN:9.8.7-testing'
test_cmd "Verify --modcmd is able to tag the manifest" \
    0 "Executing mod-command" \
    bash -c "A_DEBUG=1 $SUBJ_FILEPATH localhost/foo/bar $TEST_CONTEXT --nopush --modcmd='$MODCMD' 2>&1"

test_cmd "Verify the tagged manifest is also present" \
    0 "[a-zA-Z0-9]+" \
    bash -c "$RUNTIME images --quiet localhost/foo/bar:9.8.7-testing"

test_cmd "Confirm tagged image manifest contains native arch '$native_arch'" \
    0 "$native_arch" \
    bash -c "$RUNTIME manifest inspect localhost/foo/bar:9.8.7-testing | jq -r '.manifests[0].platform.architecture'"

TEST_TEMP=$(mktemp -d -p '' .tmp_$(basename ${BASH_SOURCE[0]})_XXXX)

test_cmd "Confirm digest can be obtained from 'latest' manifest list" \
    0 ".+" \
    bash -c "$RUNTIME manifest inspect localhost/foo/bar:latest | jq -r '.manifests[0].digest' | tee $TEST_TEMP/latest_digest"

test_cmd "Confirm digest can be obtained from '9.8.7-testing' manifest list" \
    0 ".+" \
    bash -c "$RUNTIME manifest inspect localhost/foo/bar:9.8.7-testing | jq -r '.manifests[0].digest' | tee $TEST_TEMP/tagged_digest"

test_cmd "Verify tagged manifest image digest matches the same in latest" \
    0 "" \
    test "$(<$TEST_TEMP/tagged_digest)" == "$(<$TEST_TEMP/latest_digest)"

MODCMD='
    set -x;
    $RUNTIME images && \
    $RUNTIME manifest rm $FQIN:latest && \
    $RUNTIME manifest rm $FQIN:9.8.7-testing && \
    echo "AllGone";
'
test_cmd "Verify --modcmd can execute command string that removes all tags" \
    0 "AllGone.*No FQIN.+to be pushed" \
    bash -c "A_DEBUG=1 $SUBJ_FILEPATH --modcmd='$MODCMD' localhost/foo/bar --nopush $TEST_CONTEXT 2>&1"

test_cmd "Verify previous --modcmd removed the 'latest' tagged image" \
    125 "image not known" \
    $RUNTIME images --quiet containers-storage:localhost/foo/bar:latest

test_cmd "Verify previous --modcmd removed the '9.8.7-testing' tagged image" \
    125 "image not known" \
    $RUNTIME images --quiet containers-storage:localhost/foo/bar:9.8.7-testing

FAKE_VERSION=$RANDOM
MODCMD="set -ex;
    \$RUNTIME tag \$FQIN:latest \$FQIN:$FAKE_VERSION;
    \$RUNTIME manifest rm \$FQIN:latest;"
# TEST_FQIN and TEST_SOURCE_DIRPATH defined by caller
# shellcheck disable=SC2154
test_cmd "Verify e2e workflow w/ additional build-args" \
    0 "Pushing $TEST_FQIN:$FAKE_VERSION" \
    bash -c "env A_DEBUG=1 $SUBJ_FILEPATH \
        --prepcmd='touch $TEST_SOURCE_DIRPATH/test_context/Containerfile' \
        --modcmd='$MODCMD' \
        --arches=amd64,s390x,arm64,ppc64le \
        $TEST_FQIN \
        $TEST_CONTEXT \
        --device=/dev/fuse --label testing=true \
        2>&1"

test_cmd "Verify latest tagged image was not pushed" \
    2 'reading manifest latest in quay\.io/buildah/do_not_use: manifest unknown' \
    skopeo inspect docker://$TEST_FQIN:latest

test_cmd "Verify architectures can be obtained from manifest list" \
    0 "" \
    bash -c "$RUNTIME manifest inspect $TEST_FQIN:$FAKE_VERSION | \
        jq -r '.manifests[].platform.architecture' > $TEST_TEMP/maniarches"

for arch in amd64 s390x arm64 ppc64le; do
    test_cmd "Verify $arch architecture present in $TEST_FQIN:$FAKE_VERSION" \
        0 "" \
        grep -Fqx "$arch" $TEST_TEMP/maniarches
done

test_cmd "Verify pushed image can be removed" \
    0 "" \
    skopeo delete docker://$TEST_FQIN:$FAKE_VERSION

# Cleanup
rm -rf "$TEST_TEMP"
@@ -0,0 +1,27 @@
# Podman First-Time Contributor Certificate Generator

This directory contains a simple web-based certificate generator to celebrate first-time contributors to the Podman project.

## Files

- **`certificate_generator.html`** - Interactive web interface for creating certificates
- **`certificate_template.html`** - The certificate template used for generation
- **`first_pr.png`** - Podman logo/branding image used in certificates

## Usage

1. Open `certificate_generator.html` in a web browser
2. Fill in the contributor's details:
   - Name
   - Pull Request number
   - Date (defaults to current date)
3. Preview the certificate in real-time
4. Click "Download Certificate" to save as HTML

## Purpose

These certificates are designed to recognize and celebrate community members who make their first contribution to the Podman project. The certificates feature Podman branding and can be customized for each contributor.

## Contributing

Feel free to improve the design, add features, or suggest enhancements to make the certificate generator even better for recognizing our amazing contributors!
@@ -0,0 +1,277 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Podman Certificate Generator</title>
    <style>
        @import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;600&display=swap');
        @import url('https://fonts.googleapis.com/css2?family=Merriweather:wght@400;700;900&display=swap');

        body {
            font-family: 'Inter', sans-serif;
            background-color: #f0f2f5;
            margin: 0;
            padding: 2rem;
        }
        .container {
            display: grid;
            grid-template-columns: 380px 1fr;
            gap: 2rem;
            max-width: 1600px;
            margin: auto;
        }
        .form-panel {
            background-color: white;
            padding: 2rem;
            border-radius: 8px;
            box-shadow: 0 4px 12px rgba(0,0,0,0.1);
            height: fit-content;
            position: sticky;
            top: 2rem;
        }
        .form-panel h2 {
            margin-top: 0;
            color: #333;
            font-family: 'Merriweather', serif;
        }
        .form-group {
            margin-bottom: 1.5rem;
        }
        .form-group label {
            display: block;
            margin-bottom: 0.5rem;
            font-weight: 600;
            color: #555;
        }
        .form-group input {
            width: 100%;
            padding: 0.75rem;
            border: 1px solid #ccc;
            border-radius: 4px;
            box-sizing: border-box;
            font-size: 1rem;
        }
        .action-buttons {
            display: flex;
            gap: 1rem;
            margin-top: 1.5rem;
        }
        .action-buttons button {
            flex-grow: 1;
            padding: 0.75rem;
            border: none;
            border-radius: 4px;
            font-size: 1rem;
            font-weight: 600;
            cursor: pointer;
            transition: background-color 0.3s;
        }
        #downloadBtn {
            background-color: #28a745;
            color: white;
        }
        #downloadBtn:hover {
            background-color: #218838;
        }
        .preview-panel {
            display: flex;
            justify-content: center;
            align-items: flex-start;
        }

        /* Certificate Styles (copied from template and scaled) */
        .certificate {
            width: 800px;
            height: 1100px;
            background: #fdfaf0;
            border: 2px solid #333;
            position: relative;
            box-shadow: 0 10px 30px rgba(0,0,0,0.2);
            padding: 50px;
            box-sizing: border-box;
            display: flex;
            flex-direction: column;
            align-items: center;
            font-family: 'Merriweather', serif;
            transform: scale(0.8);
            transform-origin: top center;
        }
        .party-popper { position: absolute; font-size: 40px; }
        .top-left { top: 40px; left: 40px; }
        .top-right { top: 40px; right: 40px; }
        .main-title { font-size: 48px; font-weight: 900; color: #333; text-align: center; margin-top: 60px; line-height: 1.2; text-transform: uppercase; }
        .subtitle { font-size: 24px; font-weight: 400; color: #333; text-align: center; margin-top: 30px; text-transform: uppercase; letter-spacing: 2px; }
        .contributor-name { font-size: 56px; font-weight: 700; color: #333; text-align: center; margin: 15px 0 50px; }
        .mascot-image { width: 450px; height: 450px; background-image: url('first_pr.png'); background-size: contain; background-repeat: no-repeat; background-position: center; margin-top: 20px; -webkit-print-color-adjust: exact; print-color-adjust: exact; }
        .description { font-size: 22px; color: #333; line-height: 1.6; text-align: center; margin-top: 40px; }
        .description strong { font-weight: 700; }
        .footer { width: 100%; margin-top: auto; padding-top: 30px; border-top: 1px solid #ccc; display: flex; justify-content: space-between; align-items: flex-end; font-size: 16px; color: #333; }
        .pr-info { text-align: left; }
        .signature { text-align: right; font-style: italic; }

        @media print {
            body {
                background: #fff;
                margin: 0;
                padding: 0;
            }
            .form-panel, .action-buttons {
                display: none;
            }
            .container {
                display: block;
                margin: 0;
                padding: 0;
            }
            .preview-panel {
                padding: 0;
                margin: 0;
            }
            .certificate {
                transform: scale(1);
                box-shadow: none;
                width: 100%;
                height: 100vh;
                page-break-inside: avoid;
            }
        }
    </style>
</head>
<body>
    <div class="container">
        <div class="form-panel">
            <h2>Certificate Generator</h2>
            <div class="form-group">
                <label for="contributorName">Contributor Name</label>
                <input type="text" id="contributorName" value="Mike McGrath">
            </div>
            <div class="form-group">
                <label for="prNumber">PR Number</label>
                <input type="text" id="prNumber" value="26393">
            </div>
            <div class="form-group">
                <label for="mergeDate">Date</label>
                <input type="text" id="mergeDate" value="June 13, 2025">
            </div>
            <div class="action-buttons">
                <button id="downloadBtn">Download HTML</button>
            </div>
        </div>
        <div class="preview-panel">
            <div id="certificatePreview">
                <!-- Certificate HTML will be injected here by script -->
            </div>
        </div>
    </div>

    <script>
        const nameInput = document.getElementById('contributorName');
        const prNumberInput = document.getElementById('prNumber');
        const dateInput = document.getElementById('mergeDate');
        const preview = document.getElementById('certificatePreview');

        function generateCertificateHTML(name, prNumber, date) {
            const prLink = `https://github.com/containers/podman/pull/${prNumber}`;
            // This is the full, self-contained HTML for the certificate
            return `
                <div class="certificate">
                    <div class="party-popper top-left">🎉</div>
                    <div class="party-popper top-right">🎉</div>
                    <div class="main-title">Certificate of<br>Contribution</div>
                    <div class="subtitle">Awarded To</div>
                    <div class="contributor-name">${name}</div>
                    <div class="mascot-image"></div>
                    <div class="description">
                        For successfully submitting and merging their <strong>First Pull Request</strong> to the <strong>Podman project</strong>.<br>
                        Your contribution helps make open source better—one PR at a time!
                    </div>
                    <div class="footer">
                        <div class="pr-info">
                            <div>🔧 Merged PR: <a href="${prLink}" target="_blank">${prLink}</a></div>
                            <div style="margin-top: 5px;">${date}</div>
                        </div>
                        <div class="signature">
                            Keep hacking, keep contributing!<br>
                            – The Podman Community
                        </div>
                    </div>
                </div>
            `;
        }

        function updatePreview() {
            const name = nameInput.value || '[CONTRIBUTOR_NAME]';
            const prNumber = prNumberInput.value || '[PR_NUMBER]';
            const date = dateInput.value || '[DATE]';
            preview.innerHTML = generateCertificateHTML(name, prNumber, date);
        }

        document.getElementById('downloadBtn').addEventListener('click', () => {
            const name = nameInput.value || 'contributor';
            const prNumber = prNumberInput.value || '00000';
            const date = dateInput.value || 'Date';

            const certificateHTML = generateCertificateHTML(name, prNumber, date);
            const fullPageHTML = `
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Certificate for ${name}</title>
<style>
    /* All the CSS from the generator page */
    @import url('https://fonts.googleapis.com/css2?family=Merriweather:wght@400;700;900&display=swap');
    body { margin: 20px; font-family: 'Merriweather', serif; background: #e0e0e0; }
    .certificate {
        transform: scale(1);
        box-shadow: none;
        margin: auto;
    }
    /* Paste all certificate-related styles here */
    .certificate { width: 800px; height: 1100px; background: #fdfaf0; border: 2px solid #333; position: relative; padding: 50px; box-sizing: border-box; display: flex; flex-direction: column; align-items: center; }
    .party-popper { position: absolute; font-size: 40px; }
    .top-left { top: 40px; left: 40px; }
    .top-right { top: 40px; right: 40px; }
    .main-title { font-size: 48px; font-weight: 900; color: #333; text-align: center; margin-top: 60px; line-height: 1.2; text-transform: uppercase; }
    .subtitle { font-size: 24px; font-weight: 400; color: #333; text-align: center; margin-top: 30px; text-transform: uppercase; letter-spacing: 2px; }
    .contributor-name { font-size: 56px; font-weight: 700; color: #333; text-align: center; margin: 15px 0 50px; }
    .mascot-image { width: 450px; height: 450px; background-image: url('first_pr.png'); background-size: contain; background-repeat: no-repeat; background-position: center; margin-top: 20px; -webkit-print-color-adjust: exact; print-color-adjust: exact; }
    .description { font-size: 22px; color: #333; line-height: 1.6; text-align: center; margin-top: 40px; }
    .description strong { font-weight: 700; }
    .footer { width: 100%; margin-top: auto; padding-top: 30px; border-top: 1px solid #ccc; display: flex; justify-content: space-between; align-items: flex-end; font-size: 16px; color: #333; }
    .pr-info { text-align: left; }
    .signature { text-align: right; font-style: italic; }

    @media print {
        @page { size: A4 portrait; margin: 0; }
        body, html { width: 100%; height: 100%; margin: 0; padding: 0; }
        .certificate { width: 100%; height: 100%; box-shadow: none; transform: scale(1); }
    }
</style>
</head>
<body>${certificateHTML}</body>
</html>
`;

            const blob = new Blob([fullPageHTML], { type: 'text/html' });
            const url = URL.createObjectURL(blob);
            const a = document.createElement('a');
            a.href = url;
            a.download = `podman-contribution-certificate-${name.toLowerCase().replace(/\s+/g, '-')}.html`;
            document.body.appendChild(a);
            a.click();
            document.body.removeChild(a);
            URL.revokeObjectURL(url);
        });

        // Add event listeners to update preview on input change
        [nameInput, prNumberInput, dateInput].forEach(input => {
            input.addEventListener('input', updatePreview);
        });

        // Initial preview generation
        updatePreview();
    </script>
</body>
</html>
@@ -0,0 +1,175 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Podman Certificate of Contribution</title>
    <style>
        @import url('https://fonts.googleapis.com/css2?family=Merriweather:wght@400;700;900&display=swap');

        body {
            margin: 0;
            padding: 20px;
            font-family: 'Merriweather', serif;
            background: #e0e0e0;
            display: flex;
            justify-content: center;
            align-items: center;
            min-height: 100vh;
        }

        .certificate {
            width: 800px;
            height: 1100px;
            background: #fdfaf0;
            border: 2px solid #333;
            position: relative;
            box-shadow: 0 10px 30px rgba(0, 0, 0, 0.2);
            padding: 50px;
            box-sizing: border-box;
            display: flex;
            flex-direction: column;
            align-items: center;
        }

        .party-popper {
            position: absolute;
            font-size: 40px;
        }

        .top-left {
            top: 40px;
            left: 40px;
        }

        .top-right {
            top: 40px;
            right: 40px;
        }

        .main-title {
            font-size: 48px;
            font-weight: 900;
            color: #333;
            text-align: center;
            margin-top: 60px;
            line-height: 1.2;
            text-transform: uppercase;
        }

        .subtitle {
            font-size: 24px;
            font-weight: 400;
            color: #333;
            text-align: center;
            margin-top: 30px;
            text-transform: uppercase;
            letter-spacing: 2px;
        }

        .contributor-name {
            font-size: 56px;
            font-weight: 700;
            color: #333;
            text-align: center;
            margin: 15px 0 50px;
        }

        .mascot-image {
            width: 450px;
            height: 450px;
            background-image: url('first_pr.png');
            background-size: contain;
            background-repeat: no-repeat;
            background-position: center;
            margin-top: 20px;
            -webkit-print-color-adjust: exact;
            print-color-adjust: exact;
        }

        .description {
            font-size: 22px;
            color: #333;
            line-height: 1.6;
            text-align: center;
            margin-top: 40px;
        }

        .description strong {
            font-weight: 700;
        }

        .footer {
            width: 100%;
            margin-top: auto;
            padding-top: 30px;
            border-top: 1px solid #ccc;
            display: flex;
            justify-content: space-between;
            align-items: flex-end;
            font-size: 16px;
            color: #333;
        }

        .pr-info {
            text-align: left;
        }

        .signature {
            text-align: right;
            font-style: italic;
        }

        @media print {
            @page {
                size: A4 portrait;
                margin: 0;
            }
            body, html {
                width: 100%;
                height: 100%;
                margin: 0;
                padding: 0;
                background: #fdfaf0;
            }
            .certificate {
                width: 100%;
                height: 100vh;
                box-shadow: none;
                transform: scale(1);
                border-radius: 0;
                page-break-inside: avoid;
            }
        }
    </style>
</head>
<body>
    <div class="certificate">
        <div class="party-popper top-left">🎉</div>
        <div class="party-popper top-right">🎉</div>

        <div class="main-title">Certificate of<br>Contribution</div>
        <div class="subtitle">Awarded To</div>

        <div class="contributor-name">[CONTRIBUTOR_NAME]</div>

        <div class="mascot-image"></div>

        <div class="description">
            For successfully submitting and merging their <strong>First Pull Request</strong> to the <strong>Podman project</strong>.<br>
            Your contribution helps make open source better—one PR at a time!
        </div>

        <div class="footer">
            <div class="pr-info">
                <div>🔧 Merged PR: [PR_LINK]</div>
                <div style="margin-top: 5px;">[DATE]</div>
            </div>
            <div class="signature">
                Keep hacking, keep contributing!<br>
                – The Podman Community
            </div>
        </div>
    </div>
</body>
</html>
Binary file not shown. (added image, 578 KiB)
Binary file not shown. (added image, 138 KiB)
Binary file not shown. (added image, 138 KiB)
@@ -1,11 +1,12 @@
 FROM registry.fedoraproject.org/fedora-minimal:latest
 RUN microdnf update -y && \
     microdnf install -y \
-        findutils jq git curl \
+        findutils jq git curl python3-pyyaml \
         perl-YAML perl-interpreter perl-open perl-Data-TreeDumper \
         perl-Test perl-Test-Simple perl-Test-Differences \
         perl-YAML-LibYAML perl-FindBin \
-        python3 python3-pip gcc python3-devel && \
+        python3 python3-virtualenv python3-pip gcc python3-devel \
+        python3-flake8 python3-pep8-naming python3-flake8-import-order python3-flake8-polyfill python3-mccabe python3-pep8-naming && \
     microdnf clean all && \
     rm -rf /var/cache/dnf
 # Required by perl
@@ -0,0 +1 @@
./testvenv/
@@ -0,0 +1,43 @@
#!/bin/bash

# Installs cirrus-ci_artifacts and a python virtual environment
# to execute with. NOT intended to be used directly
# by humans, should only be used indirectly by running
# ../bin/install_automation.sh <ver> cirrus-ci_artifacts

set -eo pipefail

source "$AUTOMATION_LIB_PATH/anchors.sh"
source "$AUTOMATION_LIB_PATH/console_output.sh"

INSTALL_PREFIX=$(realpath $AUTOMATION_LIB_PATH/../)
# Assume the directory this script is in represents what is being installed
INSTALL_NAME=$(basename $(dirname ${BASH_SOURCE[0]}))
AUTOMATION_VERSION=$(automation_version)
[[ -n "$AUTOMATION_VERSION" ]] || \
    die "Could not determine version of common automation libs, was 'install_automation.sh' successful?"

[[ -n "$(type -P virtualenv)" ]] || \
    die "$INSTALL_NAME requires python3-virtualenv"

echo "Installing $INSTALL_NAME version $(automation_version) into $INSTALL_PREFIX"

unset INST_PERM_ARG
if [[ $UID -eq 0 ]]; then
    INST_PERM_ARG="-o root -g root"
fi

cd $(dirname $(realpath "${BASH_SOURCE[0]}"))
virtualenv --clear --download \
    $AUTOMATION_LIB_PATH/ccia.venv
(
    source $AUTOMATION_LIB_PATH/ccia.venv/bin/activate
    pip3 install --requirement ./requirements.txt
    deactivate
)
install -v $INST_PERM_ARG -m '0644' -D -t "$INSTALL_PREFIX/lib/ccia.venv/bin" \
    ./cirrus-ci_artifacts.py
install -v $INST_PERM_ARG -D -t "$INSTALL_PREFIX/bin" ./cirrus-ci_artifacts

# Needed for installer testing
echo "Successfully installed $INSTALL_NAME"
@@ -0,0 +1,33 @@
# Description

This is a small script which examines a Cirrus-CI build and downloads
available artifacts in parallel, into a subdirectory tree corresponding
to the Cirrus-CI build ID, followed by the task name, artifact name,
and file path. Optionally, a regex may be provided to download only
artifacts whose subdirectory path matches.

The script may be executed from a currently running Cirrus-CI build
(utilizing `$CIRRUS_BUILD_ID`), but only previously uploaded artifacts
will be downloaded, and the task must have a `depends_on` statement
to synchronize with the tasks providing the expected artifacts.

# Installation

Install the python3 module requirements using pip3
(these go into `$HOME/.local/lib/python<version>`):

```
$ pip3 install --user --requirement ./requirements.txt
```

# Usage

Create and change to the directory where the artifact tree should be
created, then call the script, passing in the following arguments
(see the example below):

1. Optional, `--verbose` prints out artifacts as they are
   downloaded or skipped.
2. The Cirrus-CI build ID (required) to retrieve (the build doesn't
   need to have finished running).
3. Optional, a filter regex, e.g. `'runner_stats/.*fedora.*'`, to
   only download artifacts matching `<task>/<artifact>/<file-path>`.
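
# Example

A hypothetical end-to-end run. The build ID below is the example value from
the script's own docstring, and the regex is the one shown above; substitute
your own. If the installed `cirrus-ci_artifacts` wrapper isn't on `$PATH`,
invoke `./cirrus-ci_artifacts.py` directly instead:

```
$ mkdir -p /tmp/artifacts
$ cd /tmp/artifacts
$ cirrus-ci_artifacts --verbose 5790771712360448 'runner_stats/.*fedora.*'
```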
@@ -0,0 +1,24 @@
#!/bin/bash

# This script wraps cirrus-ci_artifacts.py inside a python
# virtual environment set up at install time. It should not
# be executed prior to installation.

set -e

# This is a convenience for callers that don't separately source this first
# in their automation setup.
if [[ -z "$AUTOMATION_LIB_PATH" ]] && [[ -r /etc/automation_environment ]]; then
    source /etc/automation_environment
fi

if [[ -z "$AUTOMATION_LIB_PATH" ]]; then
    (
        echo "ERROR: Expecting \$AUTOMATION_LIB_PATH to be defined with the"
        echo "       installation directory of automation tooling."
    ) >> /dev/stderr
    exit 1
fi

source $AUTOMATION_LIB_PATH/ccia.venv/bin/activate
exec python3 $AUTOMATION_LIB_PATH/ccia.venv/bin/cirrus-ci_artifacts.py "$@"
@@ -0,0 +1,161 @@
#!/usr/bin/env python3

"""
Download all artifacts from a Cirrus-CI Build into a subdirectory tree.

Subdirectory naming format: <build ID>/<task-name>/<artifact-name>/<file-path>

Input arguments (in order):
    Build ID - string, the build containing tasks w/ artifacts to download
               e.g. "5790771712360448"
    Path RX - Optional, regular expression to match against subdirectory
              tree naming format.
"""

import asyncio
import re
import sys
from argparse import ArgumentParser
from os import makedirs
from os.path import split
from urllib.parse import quote, unquote

# Ref: https://docs.aiohttp.org/en/stable/http_request_lifecycle.html
from aiohttp import ClientSession
# Ref: https://gql.readthedocs.io/en/latest/index.html
# pip3 install --user --requirement ./requirements.txt
# (and/or in a python virtual environment)
from gql import Client as GQLClient
from gql import gql
from gql.transport.requests import RequestsHTTPTransport


# GraphQL API URL for Cirrus-CI
CCI_GQL_URL = "https://api.cirrus-ci.com/graphql"

# Artifact download base-URL for Cirrus-CI.
# Download URL will be formed by appending:
# "/<CIRRUS_BUILD_ID>/<TASK NAME OR ALIAS>/<ARTIFACTS_NAME>/<PATH>"
CCI_ART_URL = "https://api.cirrus-ci.com/v1/artifact/build"

# Set True when --verbose is first argument
VERBOSE = False


def get_tasks(gqlclient, buildId):  # noqa: N803
    """Given a build ID, return a list of task objects."""
    # Ref: https://cirrus-ci.org/api/
    query = gql('''
        query tasksByBuildId($buildId: ID!) {
          build(id: $buildId) {
            tasks {
              name,
              id,
              buildId,
              artifacts {
                name,
                files {
                  path
                }
              }
            }
          }
        }
    ''')
    query_vars = {"buildId": buildId}
    tasks = gqlclient.execute(query, variable_values=query_vars)
    if "build" in tasks and tasks["build"]:
        b = tasks["build"]
        if "tasks" in b and len(b["tasks"]):
            return b["tasks"]
        raise RuntimeError(f"No tasks found for build with ID {buildId}")
    raise RuntimeError(f"No Cirrus-CI build found with ID {buildId}")


def task_art_url_sfxs(task):
    """Given a task dict, return a list of CCI_ART_URL suffixes for all artifacts."""
    result = []
    bid = task["buildId"]
    tname = quote(task["name"])  # Make safe for URLs
    for art in task["artifacts"]:
        aname = quote(art["name"])
        for _file in art["files"]:
            fpath = quote(_file["path"])
            result.append(f"{bid}/{tname}/{aname}/{fpath}")
    return result


async def download_artifact(session, dest_path, dl_url):
    """Asynchronously download contents of dl_url as a byte-stream."""
    # Last path component assumed to be the filename
    makedirs(split(dest_path)[0], exist_ok=True)  # os.path.split
    async with session.get(dl_url) as response:
        with open(dest_path, "wb") as dest_file:
            dest_file.write(await response.read())


async def download_artifacts(task, path_rx=None):
    """Given a task dict, download all artifacts, or only those matching path_rx."""
    downloaded = []
    skipped = []
    async with ClientSession() as session:
        for art_url_sfx in task_art_url_sfxs(task):
            dest_path = unquote(art_url_sfx)  # Strip off URL encoding
            dl_url = f"{CCI_ART_URL}/{dest_path}"
            if path_rx is None or bool(path_rx.search(dest_path)):
                if VERBOSE:
                    print(f"    Downloading '{dest_path}'")
                    sys.stdout.flush()
                await download_artifact(session, dest_path, dl_url)
                downloaded.append(dest_path)
            else:
                if VERBOSE:
                    print(f"    Skipping '{dest_path}'")
                skipped.append(dest_path)
    return {"downloaded": downloaded, "skipped": skipped}


def get_args(argv):
    """Return parsed argument namespace object."""
    parser = ArgumentParser(prog="cirrus-ci_artifacts",
                            description=('Download Cirrus-CI artifacts by Build ID'
                                         ' number, into a subdirectory of the form'
                                         ' <Build ID>/<Task Name>/<Artifact Name>'
                                         '/<File Path>'))
    parser.add_argument('-v', '--verbose',
                        dest='verbose', action='store_true', default=False,
                        help='Show "Downloaded" | "Skipped" + relative artifact file-path.')
    parser.add_argument('buildId', nargs=1, metavar='<Build ID>', type=int,
                        help="A Cirrus-CI Build ID number.")
    parser.add_argument('path_rx', nargs='?', default=None, metavar='[Reg. Exp.]',
                        help="Reg. exp. to include only <task>/<artifact>/<file-path> matches.")
    return parser.parse_args(args=argv[1:])


async def download(tasks, path_rx=None):
    """Return results from all async operations."""
    # Python docs say to retain a reference to all tasks so they aren't
    # "garbage-collected" while still active.
    results = []
    for task in tasks:
        if len(task["artifacts"]):
            results.append(asyncio.create_task(download_artifacts(task, path_rx)))
    await asyncio.gather(*results)
    return results


def main(buildId, path_rx=None):  # noqa: N803,D103
    if path_rx is not None:
        path_rx = re.compile(path_rx)
    transport = RequestsHTTPTransport(url=CCI_GQL_URL, verify=True, retries=3)
    with GQLClient(transport=transport, fetch_schema_from_transport=True) as gqlclient:
        tasks = get_tasks(gqlclient, buildId)
    transport.close()
    async_results = asyncio.run(download(tasks, path_rx))
    return [r.result() for r in async_results]


if __name__ == "__main__":
    args = get_args(sys.argv)
    VERBOSE = args.verbose
    main(args.buildId[0], args.path_rx)
@@ -0,0 +1,19 @@
# Producing this list was done using the following process:
# 1. Create a temporary `req.txt` file containing only the basic
#    non-distribution provided packages, e.g. `aiohttp[speedups]`,
#    `PyYAML`, `gql[requests]`, `requests` (see cirrus-ci_artifacts.py,
#    actual requirements may have changed)
# 2. From a Fedora:latest container, install python3 & python3-virtualenv
# 3. Setup & activate a temporary virtual environment
# 4. Execute `pip3 install --requirement req.txt`
# 5. Run pip3 freeze
# 6. Edit `requirements.txt`, add the `~=` specifier to each line along
#    with the correct two-component version number (from freeze output)
# 7. In a fresh container, confirm the automation installer
#    functions with the cirrus-ci_artifacts component (see main README
#    for installer instructions)
PyYAML~=6.0
aiohttp[speedups]~=3.8
gql[requests]~=3.3
requests>=2,<3
urllib3<2.5.1
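
For reference, a rough shell sketch of the pinning process the comments above
describe (the container image, venv path, and temporary `req.txt` contents are
assumptions for illustration):

```
$ podman run -it --rm registry.fedoraproject.org/fedora:latest bash
# dnf install -y python3 python3-virtualenv
# printf '%s\n' 'aiohttp[speedups]' PyYAML 'gql[requests]' requests > req.txt
# virtualenv /tmp/pinvenv && source /tmp/pinvenv/bin/activate
# pip3 install --requirement req.txt
# pip3 freeze  # copy two-component versions into requirements.txt with '~='
```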
@@ -0,0 +1 @@
../cirrus-ci_artifacts.py
@@ -0,0 +1,29 @@
#!/bin/bash

set -e

TESTDIR=$(dirname ${BASH_SOURCE[0]})

if [[ "$GITHUB_ACTIONS" == "true" ]]; then
    echo "Lint/Style checking not supported under github actions: Skipping"
    exit 0
fi

if [[ -x $(type -P flake8-3) ]]; then
    cd "$TESTDIR"
    set -a
    virtualenv testvenv
    source testvenv/bin/activate
    testvenv/bin/python -m pip install --upgrade pip
    pip3 install --requirement ../requirements.txt
    set +a

    ./test_cirrus-ci_artifacts.py -v

    cd ..
    flake8-3 --max-line-length=100 ./cirrus-ci_artifacts.py
    flake8-3 --max-line-length=100 --extend-ignore=D101,D102,D103,D105 test/test_cirrus-ci_artifacts.py
else
    echo "Can't find flake8-3 binary, is the script executing inside the CI container?"
    exit 1
fi
@@ -0,0 +1,194 @@
#!/usr/bin/env python3

"""Verify contents of .cirrus.yml meet specific expectations."""

import asyncio
import os
import re
import unittest
from contextlib import redirect_stderr, redirect_stdout
from io import StringIO
from tempfile import TemporaryDirectory
from unittest.mock import MagicMock, mock_open, patch

import ccia

import yaml


def fake_makedirs(*args, **dargs):
    return None


# Needed for testing asyncio functions and calls
# ref: https://agariinc.medium.com/strategies-for-testing-async-code-in-python-c52163f2deab
class AsyncMock(MagicMock):

    async def __call__(self, *args, **dargs):
        return super().__call__(*args, **dargs)


class AsyncContextManager(MagicMock):

    async def __aenter__(self, *args, **dargs):
        return self.__enter__(*args, **dargs)

    async def __aexit__(self, *args, **dargs):
        return self.__exit__(*args, **dargs)


class TestBase(unittest.TestCase):

    FAKE_CCI = "sql://fake.url.invalid/graphql"
    FAKE_API = "smb://fake.url.invalid/artifact"

    def setUp(self):
        ccia.VERBOSE = True
        patch('ccia.CCI_GQL_URL', new=self.FAKE_CCI).start()
        patch('ccia.CCI_ART_URL', new=self.FAKE_API).start()
        self.addCleanup(patch.stopall)


class TestUtils(TestBase):

    # YAML is easier on human eyeballs
    # Ref: https://github.com/cirruslabs/cirrus-ci-web/blob/master/schema.graphql
    # type Artifacts and ArtifactFileInfo
    TEST_TASK_YAML = """
        - &test_task
          name: task_1
          id: 1
          buildId: 0987654321
          artifacts:
            - name: test_art-0
              type: test_type-0
              format: art_format-0
              files:
                - path: path/test/art/0
                  size: 0
            - name: test_art-1
              type: test_type-1
              format: art_format-1
              files:
                - path: path/test/art/1
                  size: 1
                - path: path/test/art/2
                  size: 2
            - name: test_art-2
              type: test_type-2
              format: art_format-2
              files:
                - path: path/test/art/3
                  size: 3
                - path: path/test/art/4
                  size: 4
                - path: path/test/art/5
                  size: 5
                - path: path/test/art/6
                  size: 6
        - <<: *test_task
          name: task_2
          id: 2
    """
    TEST_TASKS = yaml.safe_load(TEST_TASK_YAML)
    TEST_URL_RX = re.compile(r"987654321/task_.+/test_art-.+/path/test/art/.+")

    def test_task_art_url_sfxs(self):
        for test_task in self.TEST_TASKS:
            actual = ccia.task_art_url_sfxs(test_task)
            with self.subTest(test_task=test_task):
                for url in actual:
                    with self.subTest(url=url):
                        self.assertRegex(url, self.TEST_URL_RX)

    # N/B: The ClientSession mock causes a (probably) harmless warning:
    # ResourceWarning: unclosed transport <_SelectorSocketTransport fd=7>
    # I have no idea how to fix or hide this, leaving it as-is.
    def test_download_artifacts_all(self):
        for test_task in self.TEST_TASKS:
            with self.subTest(test_task=test_task), \
                 patch('ccia.download_artifact', new_callable=AsyncMock), \
                 patch('ccia.ClientSession', new_callable=AsyncContextManager), \
                 patch('ccia.makedirs', new=fake_makedirs), \
                 patch('ccia.open', new=mock_open()):

                # N/B: This makes debugging VERY difficult, comment out for pdb use
                fake_stdout = StringIO()
                fake_stderr = StringIO()
                with redirect_stderr(fake_stderr), redirect_stdout(fake_stdout):
                    asyncio.run(ccia.download_artifacts(test_task))
                self.assertEqual(fake_stderr.getvalue(), '')
                for line in fake_stdout.getvalue().splitlines():
                    with self.subTest(line=line):
                        self.assertRegex(line.strip(), self.TEST_URL_RX)


class TestMain(unittest.TestCase):

    def setUp(self):
        ccia.VERBOSE = True
        try:
            self.bid = os.environ["CIRRUS_BUILD_ID"]
        except KeyError:
            self.skipTest("Requires running under Cirrus-CI")
        self.tmp = TemporaryDirectory(prefix="test_ccia_tmp")
        self.cwd = os.getcwd()
        os.chdir(self.tmp.name)

    def tearDown(self):
        os.chdir(self.cwd)
        self.tmp.cleanup()

    def main_result_has(self, results, stdout_filepath, action="downloaded"):
        for result in results:
            for action_filepath in result[action]:
                if action_filepath == stdout_filepath:
                    exists = os.path.isfile(os.path.join(self.tmp.name, action_filepath))
                    if "downloaded" in action:
                        self.assertTrue(exists,
                                        msg=f"Downloaded file not found: '{action_filepath}'")
                        return
                    # action == skipped
                    self.assertFalse(exists,
                                     msg=f"Skipped file found: '{action_filepath}'")
                    return
        self.fail(f"Expecting to find {stdout_filepath} entry in main()'s {action} results")

    def test_cirrus_ci_download_all(self):
        expect_rx = re.compile(f".+'{self.bid}/[^/]+/[^/]+/.+'")
        # N/B: This makes debugging VERY difficult, comment out for pdb use
        fake_stdout = StringIO()
        fake_stderr = StringIO()
        with redirect_stderr(fake_stderr), redirect_stdout(fake_stdout):
            import warnings
            warnings.filterwarnings("ignore", category=DeprecationWarning)
            results = ccia.main(self.bid)
        self.assertEqual(fake_stderr.getvalue(), '')
        for line in fake_stdout.getvalue().splitlines():
            with self.subTest(line=line):
                s_line = line.lower().strip()
                filepath = line.split(sep="'", maxsplit=3)[1]
                self.assertRegex(s_line, expect_rx)
                if s_line.startswith("download"):
                    self.main_result_has(results, filepath)
                elif s_line.startswith("skip"):
                    self.main_result_has(results, filepath, "skipped")
                else:
                    self.fail(f"Unexpected stdout line: '{s_line}'")

    def test_cirrus_ci_download_none(self):
        # N/B: This makes debugging VERY difficult, comment out for pdb use
        fake_stdout = StringIO()
        fake_stderr = StringIO()
        with redirect_stderr(fake_stderr), redirect_stdout(fake_stdout):
            results = ccia.main(self.bid, r"this-will-match-nothing")
        for line in fake_stdout.getvalue().splitlines():
            with self.subTest(line=line):
                s_line = line.lower().strip()
                filepath = line.split(sep="'", maxsplit=3)[1]
                self.assertRegex(s_line, r"skipping")
                self.main_result_has(results, filepath, "skipped")


if __name__ == "__main__":
    unittest.main()
@@ -1,46 +0,0 @@
# Description

This is a quickly hacked-together script which examines a Cirrus-CI
build and prints out task IDs and names based on their status. Additionally,
it will specifically detect and list task IDs which have exhibited
a "CI agent stopped responding!" condition, using the status code
`CIASR`.

The output format is very simple: Each line is composed of the
task status (all caps) followed by a comma-separated list
of task IDs, a colon, and the quoted task name.

# Installation

Install the python3 module requirements using pip3
(these go into `$HOME/.local/lib/python<version>`):

```
$ pip3 install --user --requirement ./requirements.txt
```

# Usage

Simply execute the script, providing as arguments:

1. The *user* component of a github repository
2. The *name* component of a github repository
3. The *commit SHA* for the target Cirrus-CI build

# Example: Build monitoring

```
$ watch -n 5 ./cirrus-ci_asr.py containers podman 5d1f8dcea1401854291932d11bea6aa6920a5682

CREATED 6720901876023296:"int podman fedora-32 root host",4521878620471296:"int remote fedora-32 root host",5647778527313920:"int podman fedora-32 rootless host",5084828573892608:"sys podman fedora-32 root host",6210728480735232:"sys remote fedora-32 root host",4803353597181952:"sys podman fedora-32 rootless host"
TRIGGERED
SCHEDULED
EXECUTING 5595001969180672:"Build for fedora-32"
ABORTED
FAILED
COMPLETED 5032052015759360:"Ext. services",6157951922601984:"Smoke Test"
SKIPPED
PAUSED
CIASR
(updates every 5 seconds)
```
@@ -1,136 +0,0 @@
#!/usr/bin/env python3

"""Print list of agent-stopped-responding task IDs and status-keyed task IDs."""

import sys
from collections import namedtuple
from pprint import pprint

# Ref: https://gql.readthedocs.io/en/latest/index.html
# pip3 install --user --requirement ./requirements.txt
# (and/or in a python virtual environment)
from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport

CIRRUS_CI_STATUSES = (
    "CREATED",
    "TRIGGERED",
    "SCHEDULED",
    "EXECUTING",
    "ABORTED",
    "FAILED",
    "COMPLETED",
    "SKIPPED",
    "PAUSED"
)


def get_raw_builds(client, owner, repo, sha):
    """Retrieve list of builds for the specified owner/repo @ commit SHA."""
    # Generated using https://cirrus-ci.com/explorer
    query = gql('''
        query buildBySha($owner: String!, $repo: String!, $sha: String!) {
          searchBuilds(repositoryOwner: $owner, repositoryName: $repo, SHA: $sha) {
            id
            buildCreatedTimestamp
          }
        }
    ''')
    query_vars = dict(owner=owner, repo=repo, sha=sha)
    result = client.execute(query, variable_values=query_vars)
    if "searchBuilds" in result and len(result["searchBuilds"]):
        return result["searchBuilds"]
    else:
        raise RuntimeError(f"No Cirrus-CI builds found for {owner}/{repo} commit {sha}")


def latest_build_id(raw_builds):
    """Return the build_id of the most recent build among raw_builds."""
    latest_ts = 0
    latest_bid = 0
    for build in raw_builds:
        bts = build["buildCreatedTimestamp"]
        if bts > latest_ts:
            latest_ts = bts
            latest_bid = build["id"]
    if latest_bid:
        return latest_bid
    raise RuntimeError("Empty raw_builds list")


def get_raw_tasks(client, build_id):
    """Retrieve raw GraphQL task list from a build."""
    query = gql('''
        query tasksByBuildID($build_id: ID!) {
          build(id: $build_id) {
            tasks {
              name
              id
              status
              notifications {
                level
                message
              }
              automaticReRun
              previousRuns {
                id
              }
            }
          }
        }
    ''')
    query_vars = dict(build_id=build_id)
    result = client.execute(query, variable_values=query_vars)
    if "build" in result and result["build"]:
        result = result["build"]
        if "tasks" in result and len(result["tasks"]):
            return result["tasks"]
        else:
            raise RuntimeError(f"No tasks found for build with id {build_id}")
    else:
        raise RuntimeError(f"No Cirrus-CI build found with id {build_id}")


def status_tid_names(raw_tasks, status):
    """Return dictionary of task IDs to task names with specified status."""
    return dict([(task["id"], task["name"])
                 for task in raw_tasks
                 if task["status"] == status])


def notif_tids(raw_tasks, reason):
    """Return dictionary of task IDs to task names which match notification reason."""
    result = {}
    for task in raw_tasks:
        for notif in task["notifications"]:
            if reason in notif["message"]:
                result[task["id"]] = task["name"]
    return result


def output_tids(keyword, tid_names):
    """Write line of comma-separated task ID:"name" items, prefixed by a keyword."""
    sys.stdout.write(f'{keyword} ')
    tasks = [f'{tid}:"{name}"' for tid, name in tid_names.items()]
    sys.stdout.write(",".join(tasks))
    sys.stdout.write("\n")


if __name__ == "__main__":
    # Ref: https://cirrus-ci.org/api/
    cirrus_graphql_url = "https://api.cirrus-ci.com/graphql"
    cirrus_graphql_xport = RequestsHTTPTransport(
        url=cirrus_graphql_url,
        verify=True,
        retries=3)
    client = Client(transport=cirrus_graphql_xport, fetch_schema_from_transport=True)

    try:
        raw_builds = get_raw_builds(client, sys.argv[1], sys.argv[2], sys.argv[3])
    except IndexError as xcpt:
        print(f"Error: argument {xcpt}\n\nUsage: {sys.argv[0]} <user> <repo> <sha>")
        sys.exit(1)

    raw_tasks = get_raw_tasks(client, latest_build_id(raw_builds))
    for cci_status in CIRRUS_CI_STATUSES:
        output_tids(cci_status, status_tid_names(raw_tasks, cci_status))
    output_tids("CIASR", notif_tids(raw_tasks, "CI agent stopped responding!"))
@@ -1,9 +0,0 @@
aiohttp==3.6.2
async-timeout==3.0.1
attrs==20.2.0
certifi==2020.6.20
gql==3.0.0a3
multidict==4.7.6
requests==2.24.0
websockets==8.1
yarl==1.5.1
@@ -1,15 +0,0 @@
#!/bin/bash

# Stupid-simple, very basic "can it run" test

set -e

if [[ "$CIRRUS_CI" != "true" ]]; then
    echo -e "\nSkipping: Test must be executed under Cirrus-CI\n"
    exit 0
fi

cd "$(dirname ${BASH_SOURCE[0]})/../"
pip3 install --user --requirement ./requirements.txt
echo "Testing cirrus-ci_asr.py $CIRRUS_REPO_OWNER $CIRRUS_REPO_NAME $CIRRUS_CHANGE_IN_REPO"
./cirrus-ci_asr.py $CIRRUS_REPO_OWNER $CIRRUS_REPO_NAME $CIRRUS_CHANGE_IN_REPO
@@ -1,7 +1,10 @@
 #!/bin/bash

-# Installs cirrus-ci_retrospective system-wide. NOT intended to be used directly
+# Installs cirrus-ci_env system-wide. NOT intended to be used directly
 # by humans, should only be used indirectly by running
-# ../bin/install_automation.sh <ver> cirrus-ci_retrospective
+# ../bin/install_automation.sh <ver> cirrus-ci_env

 set -eo pipefail

 source "$AUTOMATION_LIB_PATH/anchors.sh"
 source "$AUTOMATION_LIB_PATH/console_output.sh"
@@ -21,9 +24,7 @@ if [[ $UID -eq 0 ]]; then
 fi

 cd $(dirname $(realpath "${BASH_SOURCE[0]}"))
-install -v $INST_PERM_ARG -D -t "$INSTALL_PREFIX/bin" ./bin/*
-install -v $INST_PERM_ARG -D -t "$INSTALL_PREFIX/lib" ./lib/*
-install -v $INST_PERM_ARG lib/git_unattended_gpg.sh.in "$INSTALL_PREFIX/lib/"
+install -v cirrus-ci_env.py -D "$INSTALL_PREFIX/bin/"

 # Needed for installer testing
 echo "Successfully installed $INSTALL_NAME"
@@ -0,0 +1,325 @@
#!/usr/bin/env python3

"""Utility to provide canonical listing of Cirrus-CI tasks and env. vars."""

import argparse
import logging
import re
import sys
from traceback import extract_stack
from typing import Any, Mapping

import yaml


def dbg(msg: str) -> None:
    """Shorthand for calling logging.debug()."""
    caller = extract_stack(limit=2)[0]
    logging.debug(msg, extra=dict(loc=f'(line {caller.lineno})'))


def err(msg: str) -> None:
    """Print an error message to stderr and exit non-zero."""
    caller = extract_stack(limit=2)[0]
    logging.error(msg, extra=dict(loc=f'(line {caller.lineno})'))
    sys.exit(1)


class DefFmt(dict):
    """
    Defaulting-dict helper class for render_env()'s str.format_map().

    See: https://docs.python.org/3.7/library/stdtypes.html#str.format_map
    """

    dollar_env_var = re.compile(r"\$(\w+)")
    dollarcurly_env_var = re.compile(r"\$\{(\w+)\}")

    def __missing__(self, key: str) -> str:
        """Not-found items converted back to shell env var format."""
        return "${{{0}}}".format(key)


class CirrusCfg:
    """Represent a fully realized list of .cirrus.yml tasks."""

    # Dictionary of global, configuration-wide environment variable values.
    global_env = None

    # String values representing instance type and image name/path/uri
    global_type = None
    global_image = None

    # Tracks task-parsing status, internal-only, do not use.
    _working = None

    def __init__(self, config: Mapping[str, Any]) -> None:
        """Create a new instance, given a parsed .cirrus.yml config object."""
        if not isinstance(config, dict):
            whatsit = config.__class__
            raise TypeError(f"Expected 'config' argument to be a dictionary, not a {whatsit}")
        CirrusCfg._working = "global"
        # This makes a copy, doesn't touch the original
        self.global_env = self.render_env(config.get("env", dict()))
        dbg(f"Rendered globals: {self.global_env}")
        self.global_type, self.global_image = self.get_type_image(config)
        dbg(f"Using global type '{self.global_type}' and image '{self.global_image}'")
        self.tasks = self.render_tasks(config)
        dbg(f"Processed {len(self.tasks)} tasks")
        self.names = list(self.tasks.keys())
        self.names.sort()
        self.names = tuple(self.names)  # help notice attempts to modify

    def render_env(self, env: Mapping[str, str]) -> Mapping[str, str]:
        """
        Repeatedly call format_env() to render out-of-order env key values.

        This is a compromise vs recursion: since substitution values may be
        referenced while processing, and dictionary keys have no defined
        order, simply provide multiple chances for the substitution to
        occur. On failure, a shell-compatible variable reference is simply
        left in place.
        """
        # There's no simple way to detect when substitutions are
        # complete, so we mirror Cirrus-CI's behavior, which
        # loops 10 times (according to their support) through
        # the substitution routine.
        out = self.format_env(env, self.global_env)
        for _ in range(9):
            out = self.format_env(out, self.global_env)
        return out
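
    # Illustration only (not part of the original file): chained references
    # resolve one level per format_env() pass, which is why the loop above
    # is needed.  Assuming env = {"A": "$B", "B": "$C", "C": "x"}, the first
    # pass leaves A holding an intermediate reference to C; a second pass
    # settles all three keys to "x".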

    @staticmethod
    def format_env(env, global_env: Mapping[str, str]) -> Mapping[str, str]:
        """Replace shell-style references in env values, from global_env then env."""
        # This method is also used to initialize self.global_env
        if global_env is None:
            global_env = dict()

        rep = r"{\1}"  # Shell env var to python format string conversion regex
        def_fmt = DefFmt(**global_env)  # Assumes global_env already rendered

        for k, v in env.items():
            if "ENCRYPTED" in str(v):
                continue
            elif k == "PATH":
                # Handled specially by Cirrus, preserve value as-is.
                def_fmt[k] = str(v)
                continue
            _ = def_fmt.dollarcurly_env_var.sub(rep, str(v))
            def_fmt[k] = def_fmt.dollar_env_var.sub(rep, _)
        out = dict()
        for k, v in def_fmt.items():
            if k in env:  # Don't unnecessarily duplicate globals
                if k == "PATH":
                    out[k] = str(v)
                    continue
                try:
                    out[k] = str(v).format_map(def_fmt)
                except ValueError as xcpt:
                    if k == 'matrix':
                        err(f"Unsupported '{k}' key encountered in"
                            f" 'env' attribute of '{CirrusCfg._working}' task")
                    raise xcpt
        return out

    def render_tasks(self, tasks: Mapping[str, Any]) -> Mapping[str, Any]:
        """Return new tasks dict with envs rendered and matrices unrolled."""
        result = dict()
        for k, v in tasks.items():
            if not k.endswith("_task"):
                continue
            # Cirrus-CI uses this defaulting priority order
            alias = v.get("alias", k.replace("_task", ""))
            name = v.get("name", alias)
            if "matrix" in v:
                dbg(f"Processing matrix '{alias}'")
                CirrusCfg._working = alias
                # Assume Cirrus-CI accepted this config., don't check name clashes
                result.update(self.unroll_matrix(name, alias, v))
                CirrusCfg._working = 'global'
            else:
                dbg(f"Processing task '{name}'")
                CirrusCfg._working = name
                task = dict(alias=alias)
                task["env"] = self.render_env(v.get("env", dict()))
                task_name = self.render_value(name, task["env"])
                _ = self.get_type_image(v, self.global_type, self.global_image)
                self.init_task_type_image(task, *_)
                result[task_name] = task
                CirrusCfg._working = 'global'
        return result

    def unroll_matrix(self, name_default: str, alias_default: str,
                      task: Mapping[str, Any]) -> Mapping[str, Any]:
        """Produce copies of task with attributes replaced from matrix list."""
        result = dict()
        for item in task["matrix"]:
            if "name" not in task and "name" not in item:
                # Cirrus-CI goes a step further, attempting to generate a
                # unique name based on alias + matrix attributes. This is
                # a very complex process that would be insane to attempt to
                # duplicate. Instead, simply require a defined 'name'
                # attribute in every case, throwing an error if not found.
                raise ValueError(f"Expecting 'name' attribute in"
                                 f" '{alias_default}_task'"
                                 f" or matrix definition: {item}"
                                 f" for task definition: {task}")
            # default values for the rendered task - not mutable, needs a copy.
            matrix_task = dict(alias=alias_default, env=task.get("env", dict()).copy())
            matrix_name = item.get("name", name_default)
            CirrusCfg._working = matrix_name

            # matrix item env. overwrites task env.
            matrix_task["env"].update(item.get("env", dict()))
            matrix_task["env"] = self.render_env(matrix_task["env"])
            matrix_name = self.render_value(matrix_name, matrix_task["env"])
            dbg(f"  Unrolling matrix for '{matrix_name}'")
            CirrusCfg._working = matrix_name

            # Matrix item overrides task dict, overrides global defaults.
            _ = self.get_type_image(item, self.global_type, self.global_image)
            matrix_type, matrix_image = self.get_type_image(task, *_)
            self.init_task_type_image(matrix_task, matrix_type, matrix_image)
            result[matrix_name] = matrix_task
        return result

    def render_value(self, value: str, env: Mapping[str, str]) -> str:
        """Given a string value and task env dict, safely render references."""
        tmp_env = env.copy()  # don't mess up the original
        tmp_env["__value__"] = value
        return self.format_env(tmp_env, self.global_env)["__value__"]

    def get_type_image(self, item: dict,
                       default_type: str = None,
                       default_image: str = None) -> tuple:
        """Given Cirrus-CI config or task dict., return instance type and image."""
        # Order is significant, VMs always override containers
        if "gce_instance" in item:
            return "gcevm", item["gce_instance"].get("image_name", default_image)
        if "ec2_instance" in item:
            return "ec2vm", item["ec2_instance"].get("image", default_image)
        elif "osx_instance" in item or "macos_instance" in item:
            _ = item.get("osx_instance", item.get("macos_instance"))
            return "osx", _.get("image", default_image)
        elif "image" in item.get("windows_container", ""):
            return "wincntnr", item["windows_container"].get("image", default_image)
        elif "image" in item.get("container", ""):
            return "container", item["container"].get("image", default_image)
        elif "dockerfile" in item.get("container", ""):
            return "dockerfile", item["container"].get("dockerfile", default_image)
        else:
            inst_type = "unsupported"
            if self.global_type is not None:
                inst_type = default_type
            inst_image = "unknown"
            if self.global_image is not None:
                inst_image = default_image
            return inst_type, inst_image

    def init_task_type_image(self, task: Mapping[str, Any],
                             task_type: str, task_image: str) -> None:
        """Render any envs. and assert non-none values for task."""
        if task_type is None or task_image is None:
            raise ValueError(f"Invalid instance type "
                             f"({task_type}) or image ({task_image}) "
                             f"for task ({task})")
        task["inst_type"] = task_type
        inst_image = self.render_value(task_image, task["env"])
        task["inst_image"] = inst_image
        dbg(f"  Using type '{task_type}' and image '{inst_image}'")


class CLI:
    """Represent command-line-interface runtime state and behaviors."""

    # An argparse parser instance
    parser = None

    # When valid, namespace instance from parser
    args = None

    # When loaded successfully, instance of CirrusCfg
    ccfg = None

    def __init__(self) -> None:
        """Initialize runtime context based on command-line options and parameters."""
        self.parser = self.args_parser()
        self.args = self.parser.parse_args()

        # loc will be added at dbg() call time.
        logging.basicConfig(format='{levelname}: {message} {loc}', style='{')
        logger = logging.getLogger()
        if self.args.debug:
            logger.setLevel(logging.DEBUG)
            dbg("Debugging enabled")
        else:
            logger.setLevel(logging.ERROR)

        self.ccfg = CirrusCfg(yaml.safe_load(self.args.filepath))
        if not len(self.ccfg.names):
            self.parser.print_help()
            err(f"No Cirrus-CI tasks found in '{self.args.filepath.name}'")

    def __call__(self) -> None:
        """Execute requested command-line actions."""
        if self.args.list:
            dbg("Will be listing task names")
            for task_name in self.ccfg.names:
                sys.stdout.write(f"{task_name}\n")
        elif bool(self.args.inst):
            dbg("Will be showing task inst. type and image")
            task = self.ccfg.tasks[self.valid_name()]
            inst_type = task['inst_type']
            inst_image = task['inst_image']
            sys.stdout.write(f"{inst_type} {inst_image}\n")
        elif bool(self.args.envs):
            dbg("Will be listing task env. vars.")
            task = self.ccfg.tasks[self.valid_name()]
            env = self.ccfg.global_env.copy()
            env.update(task['env'])
            keys = list(env.keys())
            keys.sort()
            for key in keys:
                if key.startswith("_"):
                    continue  # Assume private to Cirrus-CI
                value = env[key]
                sys.stdout.write(f'{key}="{value}"\n')

    def args_parser(self) -> argparse.ArgumentParser:
        """Parse command-line options and arguments."""
        epilog = "Note: One of --list, --envs, or --inst MUST be specified"
        parser = argparse.ArgumentParser(description=__doc__,
                                         epilog=epilog)
        parser.add_argument('filepath', type=argparse.FileType("rt"),
                            help="File path to .cirrus.yml",
                            metavar='<filepath>')
        parser.add_argument('--debug', action='store_true',
                            help="Enable output of debugging messages")
        mgroup = parser.add_mutually_exclusive_group(required=True)
        mgroup.add_argument('--list', action='store_true',
                            help="List canonical task names")
        mgroup.add_argument('--envs', action='store',
                            help="List env. vars. for task <name>",
                            metavar="<name>")
        mgroup.add_argument('--inst', action='store',
                            help="List instance type and image for task <name>",
                            metavar="<name>")
        return parser

    def valid_name(self) -> str:
        """Print helpful error message when task name is invalid, or return it."""
        if self.args.envs is not None:
            task_name = self.args.envs
        else:
            task_name = self.args.inst
        file_name = self.args.filepath.name
        if task_name not in self.ccfg.names:
            self.parser.print_help()
            err(f"Unknown task name '{task_name}' from '{file_name}'")
        return task_name


if __name__ == "__main__":
    cli = CLI()
    cli()
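
Taken together, the CLI accepts a `.cirrus.yml` path plus exactly one of
`--list`, `--envs`, or `--inst`. A hypothetical invocation against the test
configuration that follows (the task name is how 'Build for $DISTRO_NV'
renders with FEDORA_NAME set to fedora-33):

```
$ ./cirrus-ci_env.py --list .cirrus.yml
$ ./cirrus-ci_env.py --inst 'Build for fedora-33' .cirrus.yml
$ ./cirrus-ci_env.py --envs 'Build for fedora-33' .cirrus.yml
```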
@ -0,0 +1,792 @@
|
||||||
|
---
|
||||||
|
|
||||||
|
# Main collection of env. vars to set for all tasks and scripts.
|
||||||
|
env:
|
||||||
|
####
|
||||||
|
#### Global variables used for all tasks
|
||||||
|
####
|
||||||
|
# Name of the ultimate destination branch for this CI run, PR or post-merge.
|
||||||
|
DEST_BRANCH: "master"
|
||||||
|
# Overrides default location (/tmp/cirrus) for repo clone
|
||||||
|
GOPATH: &gopath "/var/tmp/go"
|
||||||
|
GOBIN: "${GOPATH}/bin"
|
||||||
|
GOCACHE: "${GOPATH}/cache"
|
||||||
|
GOSRC: &gosrc "/var/tmp/go/src/github.com/containers/podman"
|
||||||
|
CIRRUS_WORKING_DIR: *gosrc
|
||||||
|
# The default is 'sh' if unspecified
|
||||||
|
CIRRUS_SHELL: "/bin/bash"
|
||||||
|
# Save a little typing (path relative to $CIRRUS_WORKING_DIR)
|
||||||
|
SCRIPT_BASE: "./contrib/cirrus"
|
||||||
|
# Runner statistics log file path/name
|
||||||
|
STATS_LOGFILE_SFX: 'runner_stats.log'
|
||||||
|
STATS_LOGFILE: '$GOSRC/${CIRRUS_TASK_NAME}-${STATS_LOGFILE_SFX}'
|
||||||
|
|
||||||
|
####
|
||||||
|
#### Cache-image names to test with (double-quotes around names are critical)
|
||||||
|
####
|
||||||
|
FEDORA_NAME: "fedora-33"
|
||||||
|
PRIOR_FEDORA_NAME: "fedora-32"
|
||||||
|
UBUNTU_NAME: "ubuntu-2010"
|
||||||
|
PRIOR_UBUNTU_NAME: "ubuntu-2004"
|
||||||
|
|
||||||
|
# Google-cloud VM Images
|
||||||
|
IMAGE_SUFFIX: "c6524344056676352"
|
||||||
|
FEDORA_AMI_ID: "ami-04f37091c3ec43890"
|
||||||
|
FEDORA_CACHE_IMAGE_NAME: "fedora-${IMAGE_SUFFIX}"
|
||||||
|
PRIOR_FEDORA_CACHE_IMAGE_NAME: "prior-fedora-${IMAGE_SUFFIX}"
|
||||||
|
UBUNTU_CACHE_IMAGE_NAME: "ubuntu-${IMAGE_SUFFIX}"
|
||||||
|
PRIOR_UBUNTU_CACHE_IMAGE_NAME: "prior-ubuntu-${IMAGE_SUFFIX}"
|
||||||
|
|
||||||
|
# Container FQIN's
|
||||||
|
FEDORA_CONTAINER_FQIN: "quay.io/libpod/fedora_podman:${IMAGE_SUFFIX}"
|
||||||
|
PRIOR_FEDORA_CONTAINER_FQIN: "quay.io/libpod/prior-fedora_podman:${IMAGE_SUFFIX}"
|
||||||
|
UBUNTU_CONTAINER_FQIN: "quay.io/libpod/ubuntu_podman:${IMAGE_SUFFIX}"
|
||||||
|
PRIOR_UBUNTU_CONTAINER_FQIN: "quay.io/libpod/prior-ubuntu_podman:${IMAGE_SUFFIX}"
|
||||||
|
|
||||||
|
####
|
||||||
|
#### Control variables that determine what to run and how to run it.
|
||||||
|
#### N/B: Required ALL of these are set for every single task.
|
||||||
|
####
|
||||||
|
TEST_FLAVOR: # int, sys, ext_svc, validate, automation, etc.
|
||||||
|
TEST_ENVIRON: host # 'host' or 'container'
|
||||||
|
PODBIN_NAME: podman # 'podman' or 'remote'
|
||||||
|
PRIV_NAME: root # 'root' or 'rootless'
|
||||||
|
DISTRO_NV: # any {PRIOR_,}{FEDORA,UBUNTU}_NAME value
|
||||||
|
VM_IMAGE_NAME: # One of the "Google-cloud VM Images" (above)
|
||||||
|
CTR_FQIN: # One of the "Container FQIN's" (above)
|
||||||
|
|
||||||
|
|
||||||
|
# Default timeout for each task
|
||||||
|
timeout_in: 60m
|
||||||
|
|
||||||
|
|
||||||
|
gcp_credentials: ENCRYPTED[a28959877b2c9c36f151781b0a05407218cda646c7d047fc556e42f55e097e897ab63ee78369dae141dcf0b46a9d0cdd]
|
||||||
|
|
||||||
|
aws_credentials: ENCRYPTED[4ca070bffe28eb9b27d63c568b52970dd46f119c3a83b8e443241e895dbf1737580b4d84eed27a311a2b74287ef9f79f]
|
||||||
|
|
||||||
|
|
||||||
|
# Default/small container image to execute tasks with
|
||||||
|
container: &smallcontainer
|
||||||
|
image: ${CTR_FQIN}
|
||||||
|
# Resources are limited across ALL currently executing tasks
|
||||||
|
# ref: https://cirrus-ci.org/guide/linux/#linux-containers
|
||||||
|
cpu: 2
|
||||||
|
memory: 2
|
||||||
|
|
||||||
|
|
||||||
|
# Attempt to prevent flakes by confirming all required external/3rd-party
|
||||||
|
# services are available and functional.
|
||||||
|
ext_svc_check_task:
|
||||||
|
alias: 'ext_svc_check' # int. ref. name - required for depends_on reference
|
||||||
|
name: "Ext. services" # Displayed Title - has no other significance
|
||||||
|
skip: &tags "$CIRRUS_TAG != ''" # Don't run on tags
|
||||||
|
env:
|
||||||
|
TEST_FLAVOR: ext_svc
|
||||||
|
CTR_FQIN: ${FEDORA_CONTAINER_FQIN}
|
||||||
|
# NOTE: The default way Cirrus-CI clones is *NOT* compatible with
|
||||||
|
# environment expectations in contrib/cirrus/lib.sh. Specifically
|
||||||
|
# the 'origin' remote must be defined, and all remote branches/tags
|
||||||
|
# must be available for reference from CI scripts.
|
||||||
|
clone_script: &full_clone |
|
||||||
|
cd /
|
||||||
|
rm -rf $CIRRUS_WORKING_DIR
|
||||||
|
mkdir -p $CIRRUS_WORKING_DIR
|
||||||
|
git clone --recursive --branch=$DEST_BRANCH https://x-access-token:${CIRRUS_REPO_CLONE_TOKEN}@github.com/${CIRRUS_REPO_FULL_NAME}.git $CIRRUS_WORKING_DIR
|
||||||
|
cd $CIRRUS_WORKING_DIR
|
||||||
|
git remote update origin
|
||||||
|
if [[ -n "$CIRRUS_PR" ]]; then # running for a PR
|
||||||
|
git fetch origin pull/$CIRRUS_PR/head:pull/$CIRRUS_PR
|
||||||
|
git checkout pull/$CIRRUS_PR
|
||||||
|
else
|
||||||
|
git reset --hard $CIRRUS_CHANGE_IN_REPO
|
||||||
|
fi
|
||||||
|
make install.tools
|
||||||
|
|
||||||
|
setup_script: &setup '$GOSRC/$SCRIPT_BASE/setup_environment.sh'
|
||||||
|
main_script: &main '/usr/bin/time --verbose --output="$STATS_LOGFILE" $GOSRC/$SCRIPT_BASE/runner.sh'
|
||||||
|
always: &runner_stats
|
||||||
|
runner_stats_artifacts:
|
||||||
|
path: ./*-${STATS_LOGFILE_SFX}
|
||||||
|
type: text/plain
|
||||||
|
|
||||||
|
|
||||||
|
# Execute some quick checks to confirm this YAML file and all
|
||||||
|
# automation-related shell scripts are sane.
|
||||||
|
automation_task:
|
||||||
|
alias: 'automation'
|
||||||
|
name: "Check Automation"
|
||||||
|
skip: &branches_and_tags "$CIRRUS_PR == '' || $CIRRUS_TAG != ''" # Don't run on branches/tags
|
||||||
|
container: *smallcontainer
|
||||||
|
env:
|
||||||
|
TEST_FLAVOR: automation
|
||||||
|
CTR_FQIN: ${FEDORA_CONTAINER_FQIN}
|
||||||
|
TEST_ENVIRON: container
|
||||||
|
clone_script: *full_clone
|
||||||
|
setup_script: *setup
|
||||||
|
main_script: *main
|
||||||
|
always: *runner_stats
|
||||||
|
|
||||||
|
|
||||||
|
# N/B: This task is critical. It builds all binaries and release archives
|
||||||
|
# for the project, using all primary OS platforms and versions. Assuming
|
||||||
|
# the builds are successful, a cache is stored of the entire `$GOPATH`
|
||||||
|
# contents. For all subsequent tasks, the _BUILD_CACHE_HANDLE value
|
||||||
|
# is used as a key to reuse this cache, saving both time and money.
|
||||||
|
# The only exceptions are tasks which only run inside a container, they
|
||||||
|
# will not have access the cache and therefore must rely on cloning the
|
||||||
|
# repository.
build_task:
    alias: 'build'
    name: 'Build for $DISTRO_NV'
    gce_instance: &standardvm
        image_project: libpod-218412
        zone: "us-central1-a"
        cpu: 2
        memory: "4Gb"
        # Required to be 200gig, do not modify - has i/o performance impact
        # according to gcloud CLI tool warning messages.
        disk: 200
        image_name: "${VM_IMAGE_NAME}"  # from stdenvars
    matrix: &platform_axis
        # Ref: https://cirrus-ci.org/guide/writing-tasks/#matrix-modification
        - env: &stdenvars
              DISTRO_NV: ${FEDORA_NAME}
              # Not used here, is used in other tasks
              VM_IMAGE_NAME: ${FEDORA_CACHE_IMAGE_NAME}
              CTR_FQIN: ${FEDORA_CONTAINER_FQIN}
              # ID for re-use of build output
              _BUILD_CACHE_HANDLE: ${FEDORA_NAME}-build-${CIRRUS_BUILD_ID}
        # - env:
        #       DISTRO_NV: ${PRIOR_FEDORA_NAME}
        #       VM_IMAGE_NAME: ${PRIOR_FEDORA_CACHE_IMAGE_NAME}
        #       CTR_FQIN: ${PRIOR_FEDORA_CONTAINER_FQIN}
        #       _BUILD_CACHE_HANDLE: ${PRIOR_FEDORA_NAME}-build-${CIRRUS_BUILD_ID}
        - env:
              DISTRO_NV: ${UBUNTU_NAME}
              VM_IMAGE_NAME: ${UBUNTU_CACHE_IMAGE_NAME}
              CTR_FQIN: ${UBUNTU_CONTAINER_FQIN}
              _BUILD_CACHE_HANDLE: ${UBUNTU_NAME}-build-${CIRRUS_BUILD_ID}
        - env:
              DISTRO_NV: ${PRIOR_UBUNTU_NAME}
              VM_IMAGE_NAME: ${PRIOR_UBUNTU_CACHE_IMAGE_NAME}
              CTR_FQIN: ${PRIOR_UBUNTU_CONTAINER_FQIN}
              _BUILD_CACHE_HANDLE: ${PRIOR_UBUNTU_NAME}-build-${CIRRUS_BUILD_ID}
    env:
        TEST_FLAVOR: build
    # Ref: https://cirrus-ci.org/guide/writing-tasks/#cache-instruction
    gopath_cache: &gopath_cache
        folder: *gopath  # Required hard-coded path, no variables.
        fingerprint_script: echo "$_BUILD_CACHE_HANDLE"
        # Cheat: Clone here when cache is empty, guaranteeing consistency.
        populate_script: *full_clone
    # A normal clone would invalidate useful cache
    clone_script: &noop mkdir -p $CIRRUS_WORKING_DIR
    setup_script: *setup
    main_script: *main
    always: &binary_artifacts
        <<: *runner_stats
        gosrc_artifacts:
            path: ./*  # Grab everything in top-level $GOSRC
            type: application/octet-stream
        binary_artifacts:
            path: ./bin/*
            type: application/octet-stream


# Confirm the result of building on at least one platform appears sane.
# This confirms the binaries can be executed, checks --help vs docs, and
# other essential post-build validation checks.
validate_task:
    name: "Validate $DISTRO_NV Build"
    alias: validate
    # This task is primarily intended to catch human errors early on, in a
    # PR. Skip it for branch-push, branch-create, and tag-push to improve
    # automation reliability/speed in those contexts. Any errors missed due
    # to nonsequential PR merging practices will be caught by build or test
    # task failures on a future PR.
    skip: *branches_and_tags
    depends_on:
        - ext_svc_check
        - automation
        - build
    # golangci-lint is a very, very hungry beast.
    gce_instance: &bigvm
        <<: *standardvm
        cpu: 8
        memory: "16Gb"
    env:
        <<: *stdenvars
        TEST_FLAVOR: validate
    gopath_cache: &ro_gopath_cache
        <<: *gopath_cache
        reupload_on_changes: false
    clone_script: *noop
    setup_script: *setup
    main_script: *main
    always: *runner_stats


# Exercise the "libpod" API with a small set of common
# operations to ensure they are functional.
bindings_task:
    name: "Test Bindings"
    alias: bindings
    only_if: &not_docs $CIRRUS_CHANGE_TITLE !=~ '.*CI:DOCS.*'
    skip: *branches_and_tags
    depends_on:
        - build
    gce_instance: *standardvm
    env:
        <<: *stdenvars
        TEST_FLAVOR: bindings
    gopath_cache: *ro_gopath_cache
    clone_script: *noop  # Comes from cache
    setup_script: *setup
    main_script: *main
    always: *runner_stats


# Build the "libpod" API documentation `swagger.yaml` and
# publish it to google-cloud-storage (GCS).
swagger_task:
    name: "Test Swagger"
    alias: swagger
    depends_on:
        - build
    gce_instance: *standardvm
    env:
        <<: *stdenvars
        TEST_FLAVOR: swagger
        # TODO: Due to podman 3.0 activity (including new images), avoid
        # disturbing the status-quo just to incorporate this one new
        # container image. Uncomment the line below when CI activities normalize.
        #CTR_FQIN: 'quay.io/libpod/gcsupld:${IMAGE_SUFFIX}'
        CTR_FQIN: 'quay.io/libpod/gcsupld:c4813063494828032'
        GCPJSON: ENCRYPTED[asdf1234]
        GCPNAME: ENCRYPTED[asdf1234]
        GCPPROJECT: 'libpod-218412'
    gopath_cache: *ro_gopath_cache
    clone_script: *noop  # Comes from cache
    setup_script: *setup
    main_script: *main
    always: *binary_artifacts


# Check that all included go modules from other sources match
# what is expected in `vendor/modules.txt` vs `go.mod`. Also
# make sure that the generated bindings in pkg/bindings/...
# are in sync with the code.
consistency_task:
    name: "Test Code Consistency"
    alias: consistency
    skip: *tags
    depends_on:
        - build
    env:
        <<: *stdenvars
        TEST_FLAVOR: consistency
        TEST_ENVIRON: container
        CTR_FQIN: ${FEDORA_CONTAINER_FQIN}
    clone_script: *full_clone  # build-cache not available to container tasks
    setup_script: *setup
    main_script: *main
    always: *runner_stats


# There are several other important variations of podman which
# must always build successfully. Most of them are handled in
# this task, though a few need dedicated tasks which follow.
alt_build_task:
    name: "$ALT_NAME"
    alias: alt_build
    only_if: *not_docs
    depends_on:
        - build
    env:
        <<: *stdenvars
        TEST_FLAVOR: "altbuild"
    gce_instance: *standardvm
    matrix:
        - env:
              ALT_NAME: 'Build Each Commit'
        - env:
              ALT_NAME: 'Windows Cross'
        - env:
              ALT_NAME: 'Build Without CGO'
        - env:
              ALT_NAME: 'Test build RPM'
        - env:
              ALT_NAME: 'Alt Arch. Cross'
    gopath_cache: *ro_gopath_cache
    clone_script: *noop  # Comes from cache
    setup_script: *setup
    main_script: *main
    always: *binary_artifacts


# Confirm building a statically-linked binary is successful
static_alt_build_task:
    name: "Static Build"
    alias: static_alt_build
    only_if: *not_docs
    depends_on:
        - build
    # Community-maintained task, may fail on occasion. If so, uncomment
    # the next line and file an issue with details about the failure.
    # allow_failures: $CI == $CI
    gce_instance: *bigvm
    env:
        <<: *stdenvars
        TEST_FLAVOR: "altbuild"
        # gce_instance variation prevents this being included in alt_build_task
        ALT_NAME: 'Static build'
        # Do not use 'latest': a fixed-version tag is needed for runtime stability.
        CTR_FQIN: "docker.io/nixos/nix:2.3.6"
        # Authentication token for pushing the build cache to cachix.
        # This is critical; it helps avoid a very lengthy process of
        # statically building every dependency needed to build podman.
        # Assuming the pinned nix dependencies in nix/nixpkgs.json have not
        # changed, this cache will ensure that only the static podman binary is
        # built.
        CACHIX_AUTH_TOKEN: ENCRYPTED[asdf1234]
    setup_script: *setup
    main_script: *main
    always: *binary_artifacts


# Confirm building the remote client, natively on a Mac OS-X VM.
osx_alt_build_task: &blahblah
    name: "OSX Cross"
    alias: osx_alt_build
    depends_on:
        - build
    env:
        <<: *stdenvars
        # OSX platform variation prevents this being included in alt_build_task
        TEST_FLAVOR: "altbuild"
        ALT_NAME: 'OSX Cross'
    osx_instance:
        image: 'catalina-base'
    script:
        - brew install go
        - brew install go-md2man
        - make podman-remote-darwin
        - make install-podman-remote-darwin-docs
    always: *binary_artifacts

macos_alt_build_task:
    <<: *blahblah
    name: "MacOS Cross"
    alias: macos_alt_build
    macos_instance:
        image: 'catalina-base'

# This task is a stub: In the future it will be used to verify
# podman is compatible with the docker python-module.
docker-py_test_task:
    name: Docker-py Compat.
    alias: docker-py_test
    skip: *tags
    only_if: *not_docs
    depends_on:
        - build
    gce_instance: *standardvm
    env:
        <<: *stdenvars
        TEST_FLAVOR: docker-py
        TEST_ENVIRON: container
    gopath_cache: *ro_gopath_cache
    clone_script: *noop  # Comes from cache
    setup_script: *setup
    main_script: *main
    always: *runner_stats


# Does exactly what it says: execute the podman unit-tests on all primary
# platforms and release versions.
unit_test_task:
    name: "Unit tests on $DISTRO_NV"
    alias: unit_test
    skip: *tags
    only_if: *not_docs
    depends_on:
        - validate
    matrix: *platform_axis
    gce_instance: *standardvm
    env:
        TEST_FLAVOR: unit
    clone_script: *noop  # Comes from cache
    gopath_cache: *ro_gopath_cache
    setup_script: *setup
    main_script: *main
    always: *runner_stats


apiv2_test_task:
    name: "APIv2 test on $DISTRO_NV"
    alias: apiv2_test
    skip: *tags
    depends_on:
        - validate
    gce_instance: *standardvm
    env:
        <<: *stdenvars
        TEST_FLAVOR: apiv2
    clone_script: *noop  # Comes from cache
    gopath_cache: *ro_gopath_cache
    setup_script: *setup
    main_script: *main
    always: &logs_artifacts
        <<: *runner_stats
        # Required for `contrib/cirrus/logformatter` to work properly
        html_artifacts:
            path: ./*.html
            type: text/html
        package_versions_script: '$SCRIPT_BASE/logcollector.sh packages'
        df_script: '$SCRIPT_BASE/logcollector.sh df'
        audit_log_script: '$SCRIPT_BASE/logcollector.sh audit'
        journal_script: '$SCRIPT_BASE/logcollector.sh journal'
        podman_system_info_script: '$SCRIPT_BASE/logcollector.sh podman'
        time_script: '$SCRIPT_BASE/logcollector.sh time'

compose_test_task:
    name: "compose test on $DISTRO_NV"
    alias: compose_test
    only_if: *not_docs
    skip: *tags
    depends_on:
        - validate
    gce_instance: *standardvm
    env:
        <<: *stdenvars
        TEST_FLAVOR: compose
    clone_script: *noop  # Comes from cache
    gopath_cache: *ro_gopath_cache
    setup_script: *setup
    main_script: *main
    always: *logs_artifacts


# Execute the podman integration tests on all primary platforms and release
# versions, as root, without involving the podman-remote client.
local_integration_test_task: &local_integration_test_task
    # Integration-test task name convention:
    # <int.|sys.> <podman|remote> <Distro NV> <root|rootless>
    name: &std_name_fmt "$TEST_FLAVOR $PODBIN_NAME $DISTRO_NV $PRIV_NAME $TEST_ENVIRON"
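    # e.g. this renders to "int podman fedora-33 root host" (cf. the expected
    # task-name list further below)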
    alias: local_integration_test
    only_if: *not_docs
    skip: *branches_and_tags
    depends_on:
        - unit_test
    matrix: *platform_axis
    gce_instance: *standardvm
    timeout_in: 90m
    env:
        TEST_FLAVOR: int
    clone_script: *noop  # Comes from cache
    gopath_cache: *ro_gopath_cache
    setup_script: *setup
    main_script: *main
    always: &int_logs_artifacts
        <<: *logs_artifacts
        ginkgo_node_logs_artifacts:
            path: ./test/e2e/ginkgo-node-*.log
            type: text/plain


# Nearly identical to `local_integration_test` except all operations
# are performed through the podman-remote client vs a podman "server"
# running on the same host.
remote_integration_test_task:
    <<: *local_integration_test_task
    alias: remote_integration_test
    env:
        TEST_FLAVOR: int
        PODBIN_NAME: remote


# Run the complete set of integration tests from inside a container.
# This verifies all/most operations function with "podman-in-podman".
container_integration_test_task:
    name: *std_name_fmt
    alias: container_integration_test
    only_if: *not_docs
    skip: *branches_and_tags
    depends_on:
        - unit_test
    matrix: &fedora_vm_axis
        - env:
              DISTRO_NV: ${FEDORA_NAME}
              _BUILD_CACHE_HANDLE: ${FEDORA_NAME}-build-${CIRRUS_BUILD_ID}
              VM_IMAGE_NAME: ${FEDORA_CACHE_IMAGE_NAME}
              CTR_FQIN: ${FEDORA_CONTAINER_FQIN}
        # - env:
        #       DISTRO_NV: ${PRIOR_FEDORA_NAME}
        #       _BUILD_CACHE_HANDLE: ${PRIOR_FEDORA_NAME}-build-${CIRRUS_BUILD_ID}
        #       VM_IMAGE_NAME: ${PRIOR_FEDORA_CACHE_IMAGE_NAME}
        #       CTR_FQIN: ${PRIOR_FEDORA_CONTAINER_FQIN}
    gce_instance: *standardvm
    timeout_in: 90m
    env:
        TEST_FLAVOR: int
        TEST_ENVIRON: container
    clone_script: *noop  # Comes from cache
    gopath_cache: *ro_gopath_cache
    setup_script: *setup
    main_script: *main
    always: *int_logs_artifacts


# Execute most integration tests as a regular (non-root) user.
rootless_integration_test_task:
    name: *std_name_fmt
    alias: rootless_integration_test
    only_if: *not_docs
    skip: *branches_and_tags
    depends_on:
        - unit_test
    matrix: *fedora_vm_axis
    gce_instance: *standardvm
    timeout_in: 90m
    env:
        TEST_FLAVOR: int
        PRIV_NAME: rootless
    clone_script: *noop  # Comes from cache
    gopath_cache: *ro_gopath_cache
    setup_script: *setup
    main_script: *main
    always: *int_logs_artifacts


podman_machine_task:
    name: *std_name_fmt
    alias: podman_machine
    # FIXME: Added for speedy-testing
    only_if: $CIRRUS_CHANGE_TITLE =~ '.*CI:BUILD.*'
    depends_on:
        - build
        - local_integration_test
        - remote_integration_test
        - container_integration_test
        - rootless_integration_test
    ec2_instance:
        image: "${VM_IMAGE_NAME}"
        type: m5zn.metal  # Bare-metal instance is required
        region: us-east-1
    env:
        TEST_FLAVOR: "machine"
        PRIV_NAME: "rootless"  # intended use-case
        DISTRO_NV: "${FEDORA_NAME}"
        VM_IMAGE_NAME: "${FEDORA_AMI_ID}"
    clone_script: *noop  # Comes from cache
    gopath_cache: *ro_gopath_cache
    setup_script: *setup
    main_script: *main
    always: *int_logs_artifacts


# Always run subsequent to integration tests. While some parallelism (and
# therefore runtime) is lost this way, debugging system-test failures would
# otherwise be more challenging for some golang developers. The following
# tasks run across the same matrix as the integration-tests (above).
local_system_test_task: &local_system_test_task
    name: *std_name_fmt
    alias: local_system_test
    skip: *tags
    only_if: *not_docs
    depends_on:
        - local_integration_test
    matrix: *platform_axis
    gce_instance: *standardvm
    env:
        TEST_FLAVOR: sys
    clone_script: *noop  # Comes from cache
    gopath_cache: *ro_gopath_cache
    setup_script: *setup
    main_script: *main
    always: *logs_artifacts


remote_system_test_task:
    <<: *local_system_test_task
    alias: remote_system_test
    depends_on:
        - remote_integration_test
    env:
        TEST_FLAVOR: sys
        PODBIN_NAME: remote


rootless_system_test_task:
    name: *std_name_fmt
    alias: rootless_system_test
    skip: *tags
    only_if: *not_docs
    depends_on:
        - rootless_integration_test
    matrix: *fedora_vm_axis
    gce_instance: *standardvm
    env:
        TEST_FLAVOR: sys
        PRIV_NAME: rootless
    clone_script: *noop  # Comes from cache
    gopath_cache: *ro_gopath_cache
    setup_script: *setup
    main_script: *main
    always: *logs_artifacts

# FIXME: we may want to consider running this from nightly cron instead of CI.
# The tests are actually pretty quick (less than a minute) but they do rely
# on pulling images from quay.io, which means we're subject to network flakes.
#
# FIXME: how does this env matrix work, anyway? Does it spin up multiple VMs?
# We might just want to encode the version matrix in runner.sh instead.
upgrade_test_task:
    name: "Upgrade test: from $PODMAN_UPGRADE_FROM"
    alias: upgrade_test
    skip: *tags
    only_if: *not_docs
    depends_on:
        - local_system_test
    matrix:
        - env:
              PODMAN_UPGRADE_FROM: v1.9.0
        - env:
              PODMAN_UPGRADE_FROM: v2.0.6
        - env:
              PODMAN_UPGRADE_FROM: v2.1.1
    gce_instance: *standardvm
    env:
        TEST_FLAVOR: upgrade_test
        DISTRO_NV: ${FEDORA_NAME}
        VM_IMAGE_NAME: ${FEDORA_CACHE_IMAGE_NAME}
        # ID for re-use of build output
        _BUILD_CACHE_HANDLE: ${FEDORA_NAME}-build-${CIRRUS_BUILD_ID}
    clone_script: *noop
    gopath_cache: *ro_gopath_cache
    setup_script: *setup
    main_script: *main
    always: *logs_artifacts

# This task is critical. It updates the "last-used by" timestamp stored
# in metadata for all VM images. This mechanism functions in tandem with
# an out-of-band pruning operation to remove disused VM images.
meta_task:
    name: "VM img. keepalive"
    alias: meta
    container:
        cpu: 2
        memory: 2
        image: quay.io/libpod/imgts:$IMAGE_SUFFIX
    env:
        # Space-separated list of images used by this repository state
        IMGNAMES: >-
            ${FEDORA_CACHE_IMAGE_NAME}
            ${PRIOR_FEDORA_CACHE_IMAGE_NAME}
            ${UBUNTU_CACHE_IMAGE_NAME}
            ${PRIOR_UBUNTU_CACHE_IMAGE_NAME}
        BUILDID: "${CIRRUS_BUILD_ID}"
        REPOREF: "${CIRRUS_REPO_NAME}"
        GCPJSON: ENCRYPTED[asdf1234]
        GCPNAME: ENCRYPTED[asdf1234]
        GCPPROJECT: libpod-218412
    clone_script: *noop
    script: /usr/local/bin/entrypoint.sh


# Status aggregator for all tests. This task simply ensures a defined
# set of tasks all passed, and allows confirming that based on the status
# of this task.
success_task:
    name: "Total Success"
    alias: success
    # N/B: ALL tasks must be listed here, minus their '_task' suffix.
    depends_on:
        - ext_svc_check
        - automation
        - build
        - validate
        - bindings
        - swagger
        - consistency
        - alt_build
        - static_alt_build
        - osx_alt_build
        - docker-py_test
        - unit_test
        - apiv2_test
        - compose_test
        - local_integration_test
        - remote_integration_test
        - rootless_integration_test
        - container_integration_test
        - podman_machine
        - local_system_test
        - remote_system_test
        - rootless_system_test
        - upgrade_test
        - meta
    env:
        CTR_FQIN: ${FEDORA_CONTAINER_FQIN}
        TEST_ENVIRON: container
    clone_script: *noop
    script: /bin/true

win_installer_task:
    name: "Verify Win Installer Build"
    alias: win_installer
    # Don't run for the multiarch container image cirrus-cron job.
    only_if: $CIRRUS_CRON != 'multiarch'
    depends_on:
        - alt_build
    windows_container:
        image: "cirrusci/windowsservercore:2019"
    env:
        PATH: "${PATH};C:\\ProgramData\\chocolatey\\bin"
        CIRRUS_SHELL: powershell
        # Fake version; we are only testing the installer functions, so the version doesn't matter.
        WIN_INST_VER: 9.9.9
    install_script: '.\contrib\cirrus\win-installer-install.ps1'
    main_script: '.\contrib\cirrus\win-installer-main.ps1'

# When a new tag is pushed, confirm that the code and commits
# meet criteria for an official release.
release_task:
    name: "Verify Release"
    alias: release
    only_if: *tags
    depends_on:
        - success
    gce_instance: *standardvm
    env:
        <<: *stdenvars
        TEST_FLAVOR: release
    gopath_cache: *ro_gopath_cache
    clone_script: *noop  # Comes from cache
    setup_script: *setup
    main_script: *main
    always: *binary_artifacts


# When preparing to release a new version, this task may be manually
# activated at the PR stage to verify the build is proper for a potential
# podman release.
#
# Note: This cannot use a YAML alias on 'release_task' as of this
# comment; it is incompatible with 'trigger_type: manual'.
release_test_task:
    name: "Optional Release Test"
    alias: release_test
    only_if: $CIRRUS_PR != ''
    trigger_type: manual
    depends_on:
        - success
    gce_instance: *standardvm
    env:
        <<: *stdenvars
        TEST_FLAVOR: release
    gopath_cache: *ro_gopath_cache
    clone_script: *noop  # Comes from cache
    setup_script: *setup
    main_script: *main
    always: *binary_artifacts
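Aside: the &anchors, *aliases, and '<<' merge keys used throughout the file
above resolve at YAML load time; a minimal sketch (hypothetical values,
PyYAML assumed available):

import yaml

snippet = """
env: &stdenvars
    DISTRO_NV: fedora-33
    TEST_FLAVOR: build
validate_env:
    <<: *stdenvars          # merge key: pull in all of &stdenvars
    TEST_FLAVOR: validate   # a local key overrides the merged one
"""

data = yaml.safe_load(snippet)
assert data["validate_env"] == {"DISTRO_NV": "fedora-33",
                                "TEST_FLAVOR": "validate"}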
@ -0,0 +1,46 @@
APIv2 test on fedora-33
Alt Arch. Cross
Build Each Commit
Build Without CGO
Build for fedora-33
Build for ubuntu-2004
Build for ubuntu-2010
Check Automation
Docker-py Compat.
Ext. services
OSX Cross
Optional Release Test
Static Build
Test Bindings
Test Code Consistency
Test Swagger
Test build RPM
Total Success
Unit tests on fedora-33
Unit tests on ubuntu-2004
Unit tests on ubuntu-2010
Upgrade test: from v1.9.0
Upgrade test: from v2.0.6
Upgrade test: from v2.1.1
VM img. keepalive
Validate fedora-33 Build
Verify Release
Verify Win Installer Build
Windows Cross
compose test on fedora-33
int podman fedora-33 root container
int podman fedora-33 root host
int podman fedora-33 rootless host
int podman ubuntu-2004 root host
int podman ubuntu-2010 root host
int remote fedora-33 root host
int remote ubuntu-2004 root host
int remote ubuntu-2010 root host
machine podman fedora-33 rootless host
sys podman fedora-33 root host
sys podman fedora-33 rootless host
sys podman ubuntu-2004 root host
sys podman ubuntu-2010 root host
sys remote fedora-33 root host
sys remote ubuntu-2004 root host
sys remote ubuntu-2010 root host
@ -0,0 +1,421 @@
---

global_env:
  CIRRUS_SHELL: /bin/bash
  CIRRUS_WORKING_DIR: /var/tmp/go/src/github.com/containers/podman
  CTR_FQIN: None
  DEST_BRANCH: master
  DISTRO_NV: None
  FEDORA_CACHE_IMAGE_NAME: fedora-c6524344056676352
  FEDORA_CONTAINER_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
  FEDORA_NAME: fedora-33
  GOBIN: /var/tmp/go/bin
  GOCACHE: /var/tmp/go/cache
  GOPATH: /var/tmp/go
  GOSRC: /var/tmp/go/src/github.com/containers/podman
  IMAGE_SUFFIX: c6524344056676352
  PODBIN_NAME: podman
  PRIOR_FEDORA_CACHE_IMAGE_NAME: prior-fedora-c6524344056676352
  PRIOR_FEDORA_CONTAINER_FQIN: quay.io/libpod/prior-fedora_podman:c6524344056676352
  PRIOR_FEDORA_NAME: fedora-32
  PRIOR_UBUNTU_CACHE_IMAGE_NAME: prior-ubuntu-c6524344056676352
  PRIOR_UBUNTU_CONTAINER_FQIN: quay.io/libpod/prior-ubuntu_podman:c6524344056676352
  PRIOR_UBUNTU_NAME: ubuntu-2004
  PRIV_NAME: root
  SCRIPT_BASE: ./contrib/cirrus
  STATS_LOGFILE: /var/tmp/go/src/github.com/containers/podman/${CIRRUS_TASK_NAME}-runner_stats.log
  STATS_LOGFILE_SFX: runner_stats.log
  TEST_ENVIRON: host
  TEST_FLAVOR: None
  UBUNTU_CACHE_IMAGE_NAME: ubuntu-c6524344056676352
  UBUNTU_CONTAINER_FQIN: quay.io/libpod/ubuntu_podman:c6524344056676352
  UBUNTU_NAME: ubuntu-2010
  VM_IMAGE_NAME: None

tasks:
  APIv2 test on fedora-33:
    alias: apiv2_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: apiv2
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Alt Arch. Cross:
    alias: alt_build
    env:
      ALT_NAME: Alt Arch. Cross
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: altbuild
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Build Each Commit:
    alias: alt_build
    env:
      ALT_NAME: Build Each Commit
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: altbuild
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Build Without CGO:
    alias: alt_build
    env:
      ALT_NAME: Build Without CGO
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: altbuild
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Build for fedora-33:
    alias: build
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: build
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Build for ubuntu-2004:
    alias: build
    env:
      CTR_FQIN: quay.io/libpod/prior-ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2004
      TEST_FLAVOR: build
      VM_IMAGE_NAME: prior-ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2004-build-${CIRRUS_BUILD_ID}
  Build for ubuntu-2010:
    alias: build
    env:
      CTR_FQIN: quay.io/libpod/ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2010
      TEST_FLAVOR: build
      VM_IMAGE_NAME: ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2010-build-${CIRRUS_BUILD_ID}
  Check Automation:
    alias: automation
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      TEST_ENVIRON: container
      TEST_FLAVOR: automation
  Docker-py Compat.:
    alias: docker-py_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_ENVIRON: container
      TEST_FLAVOR: docker-py
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Ext. services:
    alias: ext_svc_check
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      TEST_FLAVOR: ext_svc
  OSX Cross:
    alias: osx_alt_build
    env:
      ALT_NAME: OSX Cross
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: altbuild
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  MacOS Cross:
    alias: macos_alt_build
    env:
      ALT_NAME: MacOS Cross
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: altbuild
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Optional Release Test:
    alias: release_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: release
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Static Build:
    alias: static_alt_build
    env:
      ALT_NAME: Static build
      CTR_FQIN: docker.io/nixos/nix:2.3.6
      DISTRO_NV: fedora-33
      TEST_FLAVOR: altbuild
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Test Bindings:
    alias: bindings
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: bindings
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Test Code Consistency:
    alias: consistency
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_ENVIRON: container
      TEST_FLAVOR: consistency
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Test Swagger:
    alias: swagger
    env:
      CTR_FQIN: quay.io/libpod/gcsupld:c4813063494828032
      DISTRO_NV: fedora-33
      GCPPROJECT: libpod-218412
      TEST_FLAVOR: swagger
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Test build RPM:
    alias: alt_build
    env:
      ALT_NAME: Test build RPM
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: altbuild
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Total Success:
    alias: success
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      TEST_ENVIRON: container
  Unit tests on fedora-33:
    alias: unit_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: unit
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Unit tests on ubuntu-2004:
    alias: unit_test
    env:
      CTR_FQIN: quay.io/libpod/prior-ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2004
      TEST_FLAVOR: unit
      VM_IMAGE_NAME: prior-ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2004-build-${CIRRUS_BUILD_ID}
  Unit tests on ubuntu-2010:
    alias: unit_test
    env:
      CTR_FQIN: quay.io/libpod/ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2010
      TEST_FLAVOR: unit
      VM_IMAGE_NAME: ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2010-build-${CIRRUS_BUILD_ID}
  'Upgrade test: from v1.9.0':
    alias: upgrade_test
    env:
      DISTRO_NV: fedora-33
      PODMAN_UPGRADE_FROM: v1.9.0
      TEST_FLAVOR: upgrade_test
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  'Upgrade test: from v2.0.6':
    alias: upgrade_test
    env:
      DISTRO_NV: fedora-33
      PODMAN_UPGRADE_FROM: v2.0.6
      TEST_FLAVOR: upgrade_test
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  'Upgrade test: from v2.1.1':
    alias: upgrade_test
    env:
      DISTRO_NV: fedora-33
      PODMAN_UPGRADE_FROM: v2.1.1
      TEST_FLAVOR: upgrade_test
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  VM img. keepalive:
    alias: meta
    env:
      BUILDID: ${CIRRUS_BUILD_ID}
      GCPPROJECT: libpod-218412
      IMGNAMES: fedora-c6524344056676352 prior-fedora-c6524344056676352 ubuntu-c6524344056676352
        prior-ubuntu-c6524344056676352
      REPOREF: ${CIRRUS_REPO_NAME}
  Validate fedora-33 Build:
    alias: validate
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: validate
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Verify Release:
    alias: release
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: release
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  Verify Win Installer Build:
    alias: win_installer
    env:
      PATH: "${PATH};C:\\ProgramData\\chocolatey\\bin"
      CIRRUS_SHELL: powershell
      WIN_INST_VER: 9.9.9
  Windows Cross:
    alias: alt_build
    env:
      ALT_NAME: Windows Cross
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: altbuild
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  compose test on fedora-33:
    alias: compose_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: compose
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  int podman fedora-33 root container:
    alias: container_integration_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_ENVIRON: container
      TEST_FLAVOR: int
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  int podman fedora-33 root host:
    alias: local_integration_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: int
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  int podman fedora-33 rootless host:
    alias: rootless_integration_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      PRIV_NAME: rootless
      TEST_FLAVOR: int
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  int podman ubuntu-2004 root host:
    alias: local_integration_test
    env:
      CTR_FQIN: quay.io/libpod/prior-ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2004
      TEST_FLAVOR: int
      VM_IMAGE_NAME: prior-ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2004-build-${CIRRUS_BUILD_ID}
  int podman ubuntu-2010 root host:
    alias: local_integration_test
    env:
      CTR_FQIN: quay.io/libpod/ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2010
      TEST_FLAVOR: int
      VM_IMAGE_NAME: ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2010-build-${CIRRUS_BUILD_ID}
  int remote fedora-33 root host:
    alias: remote_integration_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      PODBIN_NAME: remote
      TEST_FLAVOR: int
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  int remote ubuntu-2004 root host:
    alias: remote_integration_test
    env:
      CTR_FQIN: quay.io/libpod/prior-ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2004
      PODBIN_NAME: remote
      TEST_FLAVOR: int
      VM_IMAGE_NAME: prior-ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2004-build-${CIRRUS_BUILD_ID}
  int remote ubuntu-2010 root host:
    alias: remote_integration_test
    env:
      CTR_FQIN: quay.io/libpod/ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2010
      PODBIN_NAME: remote
      TEST_FLAVOR: int
      VM_IMAGE_NAME: ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2010-build-${CIRRUS_BUILD_ID}
  machine podman fedora-33 rootless host:
    alias: podman_machine
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: machine
      PRIV_NAME: rootless
      VM_IMAGE_NAME: ami-04f37091c3ec43890
  sys podman fedora-33 root host:
    alias: local_system_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      TEST_FLAVOR: sys
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  sys podman fedora-33 rootless host:
    alias: rootless_system_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      PRIV_NAME: rootless
      TEST_FLAVOR: sys
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  sys podman ubuntu-2004 root host:
    alias: local_system_test
    env:
      CTR_FQIN: quay.io/libpod/prior-ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2004
      TEST_FLAVOR: sys
      VM_IMAGE_NAME: prior-ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2004-build-${CIRRUS_BUILD_ID}
  sys podman ubuntu-2010 root host:
    alias: local_system_test
    env:
      CTR_FQIN: quay.io/libpod/ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2010
      TEST_FLAVOR: sys
      VM_IMAGE_NAME: ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2010-build-${CIRRUS_BUILD_ID}
  sys remote fedora-33 root host:
    alias: remote_system_test
    env:
      CTR_FQIN: quay.io/libpod/fedora_podman:c6524344056676352
      DISTRO_NV: fedora-33
      PODBIN_NAME: remote
      TEST_FLAVOR: sys
      VM_IMAGE_NAME: fedora-c6524344056676352
      _BUILD_CACHE_HANDLE: fedora-33-build-${CIRRUS_BUILD_ID}
  sys remote ubuntu-2004 root host:
    alias: remote_system_test
    env:
      CTR_FQIN: quay.io/libpod/prior-ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2004
      PODBIN_NAME: remote
      TEST_FLAVOR: sys
      VM_IMAGE_NAME: prior-ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2004-build-${CIRRUS_BUILD_ID}
  sys remote ubuntu-2010 root host:
    alias: remote_system_test
    env:
      CTR_FQIN: quay.io/libpod/ubuntu_podman:c6524344056676352
      DISTRO_NV: ubuntu-2010
      PODBIN_NAME: remote
      TEST_FLAVOR: sys
      VM_IMAGE_NAME: ubuntu-c6524344056676352
      _BUILD_CACHE_HANDLE: ubuntu-2010-build-${CIRRUS_BUILD_ID}
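Aside: a hedged sketch of consuming this expected-data file (the filename and
structure come from test_complex_cirrus_cfg() in the test module further
below):

import yaml

with open("expected_cirrus.yml") as f:   # the test-data file shown above
    expected = yaml.safe_load(f)

# e.g. the per-task env rendered from global_env plus the task's own env
print(expected["tasks"]["Static Build"]["env"]["CTR_FQIN"])
# -> docker.io/nixos/nix:2.3.6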
@ -0,0 +1,139 @@
APIv2 test on fedora-33:
  - gcevm
  - fedora-c6524344056676352
Alt Arch. Cross:
  - gcevm
  - fedora-c6524344056676352
Build Each Commit:
  - gcevm
  - fedora-c6524344056676352
Build Without CGO:
  - gcevm
  - fedora-c6524344056676352
Build for fedora-33:
  - gcevm
  - fedora-c6524344056676352
Build for ubuntu-2004:
  - gcevm
  - prior-ubuntu-c6524344056676352
Build for ubuntu-2010:
  - gcevm
  - ubuntu-c6524344056676352
Check Automation:
  - container
  - quay.io/libpod/fedora_podman:c6524344056676352
Docker-py Compat.:
  - gcevm
  - fedora-c6524344056676352
Ext. services:
  - container
  - quay.io/libpod/fedora_podman:c6524344056676352
OSX Cross: &blahblah
  - osx
  - catalina-base
MacOS Cross: *blahblah
Optional Release Test:
  - gcevm
  - fedora-c6524344056676352
Static Build:
  - gcevm
  - fedora-c6524344056676352
Test Bindings:
  - gcevm
  - fedora-c6524344056676352
Test Code Consistency:
  - container
  - quay.io/libpod/fedora_podman:c6524344056676352
Test Swagger:
  - gcevm
  - fedora-c6524344056676352
Test build RPM:
  - gcevm
  - fedora-c6524344056676352
Total Success:
  - container
  - quay.io/libpod/fedora_podman:c6524344056676352
Unit tests on fedora-33:
  - gcevm
  - fedora-c6524344056676352
Unit tests on ubuntu-2004:
  - gcevm
  - prior-ubuntu-c6524344056676352
Unit tests on ubuntu-2010:
  - gcevm
  - ubuntu-c6524344056676352
'Upgrade test: from v1.9.0':
  - gcevm
  - fedora-c6524344056676352
'Upgrade test: from v2.0.6':
  - gcevm
  - fedora-c6524344056676352
'Upgrade test: from v2.1.1':
  - gcevm
  - fedora-c6524344056676352
VM img. keepalive:
  - container
  - quay.io/libpod/imgts:c6524344056676352
Validate fedora-33 Build:
  - gcevm
  - fedora-c6524344056676352
Verify Release:
  - gcevm
  - fedora-c6524344056676352
Verify Win Installer Build:
  - wincntnr
  - cirrusci/windowsservercore:2019
Windows Cross:
  - gcevm
  - fedora-c6524344056676352
compose test on fedora-33:
  - gcevm
  - fedora-c6524344056676352
int podman fedora-33 root container:
  - gcevm
  - fedora-c6524344056676352
int podman fedora-33 root host:
  - gcevm
  - fedora-c6524344056676352
int podman fedora-33 rootless host:
  - gcevm
  - fedora-c6524344056676352
int podman ubuntu-2004 root host:
  - gcevm
  - prior-ubuntu-c6524344056676352
int podman ubuntu-2010 root host:
  - gcevm
  - ubuntu-c6524344056676352
int remote fedora-33 root host:
  - gcevm
  - fedora-c6524344056676352
int remote ubuntu-2004 root host:
  - gcevm
  - prior-ubuntu-c6524344056676352
int remote ubuntu-2010 root host:
  - gcevm
  - ubuntu-c6524344056676352
machine podman fedora-33 rootless host:
  - ec2vm
  - ami-04f37091c3ec43890
sys podman fedora-33 root host:
  - gcevm
  - fedora-c6524344056676352
sys podman fedora-33 rootless host:
  - gcevm
  - fedora-c6524344056676352
sys podman ubuntu-2004 root host:
  - gcevm
  - prior-ubuntu-c6524344056676352
sys podman ubuntu-2010 root host:
  - gcevm
  - ubuntu-c6524344056676352
sys remote fedora-33 root host:
  - gcevm
  - fedora-c6524344056676352
sys remote ubuntu-2004 root host:
  - gcevm
  - prior-ubuntu-c6524344056676352
sys remote ubuntu-2010 root host:
  - gcevm
  - ubuntu-c6524344056676352
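Aside: a hedged sketch of reading this type/image mapping (filename and list
layout per test_complex_type_image() in the test module below):

import yaml

with open("expected_ti.yml") as f:   # the test-data file shown above
    expected_ti = yaml.safe_load(f)

# Each entry maps an unrolled task name to [instance type, instance image].
inst_type, inst_image = expected_ti["Static Build"]
assert (inst_type, inst_image) == ("gcevm", "fedora-c6524344056676352")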
@ -0,0 +1,20 @@
#!/bin/bash

set -e

cd $(dirname ${BASH_SOURCE[0]})
./test_cirrus-ci_env.py
./testbin-cirrus-ci_env.sh
./testbin-cirrus-ci_env-installer.sh

if [[ "$GITHUB_ACTIONS" == "true" ]]; then
    echo "Lint/Style checking not supported under github actions: Skipping"
    exit 0
elif [[ -x $(type -P flake8-3) ]]; then
    cd ..
    flake8-3 --max-line-length=100 .
    flake8-3 --max-line-length=100 --extend-ignore=D101,D102 test
else
    echo "Can't find flake8-3 binary, is script executing inside CI container?"
    exit 1
fi
@ -0,0 +1,298 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
"""Verify cirrus-ci_env.py functions as expected."""
|
||||||
|
|
||||||
|
import contextlib
|
||||||
|
import importlib.util
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
import unittest.mock as mock
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
# Assumes directory structure of this file relative to repo.
|
||||||
|
TEST_DIRPATH = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
SCRIPT_FILENAME = os.path.basename(__file__).replace('test_', '')
|
||||||
|
SCRIPT_DIRPATH = os.path.realpath(os.path.join(TEST_DIRPATH, '..', SCRIPT_FILENAME))
|
||||||
|
|
||||||
|
|
||||||
|
class TestBase(unittest.TestCase):
|
||||||
|
"""Base test class fixture."""
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
"""Initialize before every test."""
|
||||||
|
super().setUp()
|
||||||
|
spec = importlib.util.spec_from_file_location("cci_env", SCRIPT_DIRPATH)
|
||||||
|
self.cci_env = importlib.util.module_from_spec(spec)
|
||||||
|
spec.loader.exec_module(self.cci_env)
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
"""Finalize after every test."""
|
||||||
|
del self.cci_env
|
||||||
|
try:
|
||||||
|
del sys.modules["cci_env"]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class TestEnvRender(TestBase):
|
||||||
|
"""Confirming Cirrus-CI in-line env. var. rendering behaviors."""
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
"""Initialize before every test."""
|
||||||
|
super().setUp()
|
||||||
|
self.fake_cirrus = mock.Mock(spec=self.cci_env.CirrusCfg)
|
||||||
|
attrs = {"format_env.side_effect": self.cci_env.CirrusCfg.format_env,
|
||||||
|
"render_env.side_effect": self.cci_env.CirrusCfg.render_env,
|
||||||
|
"render_value.side_effect": self.cci_env.CirrusCfg.render_value,
|
||||||
|
"get_type_image.return_value": (None, None),
|
||||||
|
"init_task_type_image.return_value": None}
|
||||||
|
self.fake_cirrus.configure_mock(**attrs)
|
||||||
|
self.render_env = self.fake_cirrus.render_env
|
||||||
|
self.render_value = self.fake_cirrus.render_value
|
||||||
|
|
||||||
|
def test_empty(self):
|
||||||
|
"""Verify an empty env dict is unmodified."""
|
||||||
|
self.fake_cirrus.global_env = None
|
||||||
|
result = self.render_env(self.fake_cirrus, {})
|
||||||
|
self.assertDictEqual(result, {})
|
||||||
|
|
||||||
|
def test_simple_string(self):
|
||||||
|
"""Verify an simple string value is unmodified."""
|
||||||
|
self.fake_cirrus.global_env = None
|
||||||
|
result = self.render_env(self.fake_cirrus, dict(foo="bar"))
|
||||||
|
self.assertDictEqual(result, dict(foo="bar"))
|
||||||
|
|
||||||
|
def test_simple_sub(self):
|
||||||
|
"""Verify that a simple string substitution is performed."""
|
||||||
|
self.fake_cirrus.global_env = None
|
||||||
|
result = self.render_env(self.fake_cirrus, dict(foo="$bar", bar="foo"))
|
||||||
|
self.assertDictEqual(result, dict(foo="foo", bar="foo"))
|
||||||
|
|
||||||
|
def test_simple_multi(self):
|
||||||
|
"""Verify that multiple string substitution are performed."""
|
||||||
|
self.fake_cirrus.global_env = None
|
||||||
|
result = self.render_env(self.fake_cirrus,
|
||||||
|
dict(foo="$bar", bar="$baz", baz="foobarbaz"))
|
||||||
|
self.assertDictEqual(result,
|
||||||
|
dict(foo="foobarbaz", bar="foobarbaz", baz="foobarbaz"))
|
||||||
|
|
||||||
|
def test_simple_undefined(self):
|
||||||
|
"""Verify an undefined substitution falls back to dollar-curly env var."""
|
||||||
|
self.fake_cirrus.global_env = None
|
||||||
|
result = self.render_env(self.fake_cirrus, dict(foo="$baz", bar="${jar}"))
|
||||||
|
self.assertDictEqual(result, dict(foo="${baz}", bar="${jar}"))
|
||||||
|
|
||||||
|
def test_simple_global(self):
|
||||||
|
"""Verify global keys not duplicated into env."""
|
||||||
|
self.fake_cirrus.global_env = dict(bar="baz")
|
||||||
|
result = self.render_env(self.fake_cirrus, dict(foo="bar"))
|
||||||
|
self.assertDictEqual(result, dict(foo="bar"))
|
||||||
|
|
||||||
|
def test_simple_globalsub(self):
|
||||||
|
"""Verify global keys render substitutions."""
|
||||||
|
self.fake_cirrus.global_env = dict(bar="baz")
|
||||||
|
result = self.render_env(self.fake_cirrus, dict(foo="${bar}"))
|
||||||
|
self.assertDictEqual(result, dict(foo="baz"))
|
||||||
|
|
||||||
|
def test_readonly_params(self):
|
||||||
|
"""Verify global keys not modified while rendering substitutions."""
|
||||||
|
original_global_env = dict(
|
||||||
|
foo="foo", bar="bar", baz="baz", test="$item")
|
||||||
|
self.fake_cirrus.global_env = dict(**original_global_env) # A copy
|
||||||
|
original_env = dict(item="${foo}$bar${baz}")
|
||||||
|
env = dict(**original_env) # A copy
|
||||||
|
result = self.render_env(self.fake_cirrus, env)
|
||||||
|
self.assertDictEqual(self.fake_cirrus.global_env, original_global_env)
|
||||||
|
self.assertDictEqual(env, original_env)
|
||||||
|
self.assertDictEqual(result, dict(item="foobarbaz"))
|
||||||
|
|
||||||
|
def test_render_value(self):
|
||||||
|
"""Verify render_value() works by not modifying env parameter."""
|
||||||
|
self.fake_cirrus.global_env = dict(foo="foo", bar="bar", baz="baz")
|
||||||
|
original_env = dict(item="snafu")
|
||||||
|
env = dict(**original_env) # A copy
|
||||||
|
test_value = "$foo${bar}$baz $item"
|
||||||
|
expected_value = "foobarbaz snafu"
|
||||||
|
actual_value = self.render_value(self.fake_cirrus, test_value, env)
|
||||||
|
self.assertDictEqual(env, original_env)
|
||||||
|
        self.assertEqual(actual_value, expected_value)


class TestRenderTasks(TestBase):

    """Fixture for exercising Cirrus-CI task-level env. and matrix rendering behaviors."""

    def setUp(self):
        """Initialize before every test."""
        super().setUp()
        self.CCfg = self.cci_env.CirrusCfg
        self.global_env = dict(foo="foo", bar="bar", baz="baz")
        self.patchers = (
            mock.patch.object(self.CCfg, 'get_type_image',
                              mock.Mock(return_value=(None, None))),
            mock.patch.object(self.CCfg, 'init_task_type_image',
                              mock.Mock(return_value=None)))
        for patcher in self.patchers:
            patcher.start()

    def tearDown(self):
        """Finalize after every test."""
        for patcher in self.patchers:
            patcher.stop()
        super().tearDown()

    def test_empty_in_empty_out(self):
        """Verify initializing with empty tasks and globals results in empty output."""
        result = self.CCfg(dict(env=dict())).tasks
        self.assertDictEqual(result, dict())

    def test_simple_render(self):
        """Verify rendering of task-local and global env. vars."""
        env = dict(item="${foo}$bar${baz}", test="$undefined")
        task = dict(something="ignored", env=env)
        config = dict(env=self.global_env, test_task=task)
        expected = {
            "test": {
                "alias": "test",
                "env": {
                    "item": "foobarbaz",
                    "test": "${undefined}"
                }
            }
        }
        result = self.CCfg(config).tasks
        self.assertDictEqual(result, expected)

    def test_noenv_render(self):
        """Verify rendering of a task w/o local env. vars."""
        task = dict(something="ignored")
        config = dict(env=self.global_env, test_task=task)
        expected = {
            "test": {
                "alias": "test",
                "env": {}
            }
        }
        result = self.CCfg(config).tasks
        self.assertDictEqual(result, expected)

    def test_simple_matrix(self):
        """Verify unrolling of a simple matrix containing two tasks."""
        matrix1 = dict(name="test_matrix1", env=dict(item="${foo}bar"))
        matrix2 = dict(name="test_matrix2", env=dict(item="foo$baz"))
        task = dict(env=dict(something="untouched"), matrix=[matrix1, matrix2])
        config = dict(env=self.global_env, test_task=task)
        expected = {
            "test_matrix1": {
                "alias": "test",
                "env": {
                    "item": "foobar",
                    "something": "untouched"
                }
            },
            "test_matrix2": {
                "alias": "test",
                "env": {
                    "item": "foobaz",
                    "something": "untouched"
                }
            }
        }
        result = self.CCfg(config).tasks
        self.assertNotIn('test_task', result)
        for task_name in ('test_matrix1', 'test_matrix2'):
            self.assertIn(task_name, result)
            self.assertDictEqual(expected[task_name], result[task_name])
        self.assertDictEqual(result, expected)

    def test_noenv_matrix(self):
        """Verify unrolling of a single matrix w/o env. vars."""
        matrix = dict(name="test_matrix")
        task = dict(env=dict(something="untouched"), matrix=[matrix])
        config = dict(env=self.global_env, test_task=task)
        expected = {
            "test_matrix": {
                "alias": "test",
                "env": {
                    "something": "untouched"
                }
            }
        }
        result = self.CCfg(config).tasks
        self.assertDictEqual(result, expected)

    def test_rendered_name_matrix(self):
        """Verify env. values may be used in matrix names with spaces."""
        test_foobar = dict(env=dict(item="$foo$bar", unique="item"))
        bar_test = dict(name="$bar test", env=dict(item="${bar}${foo}", NAME="snafu"))
        task = dict(name="test $item",
                    env=dict(something="untouched"),
                    matrix=[bar_test, test_foobar])
        config = dict(env=self.global_env, blah_task=task)
        expected = {
            "test foobar": {
                "alias": "blah",
                "env": {
                    "item": "foobar",
                    "something": "untouched",
                    "unique": "item"
                }
            },
            "bar test": {
                "alias": "blah",
                "env": {
                    "NAME": "snafu",
                    "item": "barfoo",
                    "something": "untouched"
                }
            }
        }
        result = self.CCfg(config).tasks
        self.assertDictEqual(result, expected)

    def test_bad_env_matrix(self):
        """Verify old-style 'matrix' key of 'env' attr. throws a helpful error."""
        env = dict(foo="bar", matrix=dict(will="error"))
        task = dict(env=env)
        config = dict(env=self.global_env, test_task=task)
        err = StringIO()
        with contextlib.suppress(SystemExit), mock.patch.object(self.cci_env,
                                                                'err', err.write):
            self.assertRaises(ValueError, self.CCfg, config)
        self.assertRegex(err.getvalue(), ".+'matrix'.+'env'.+'test'.+")


class TestCirrusCfg(TestBase):

    """Fixture to verify loading/parsing from an actual YAML file."""

    def setUp(self):
        """Initialize before every test."""
        super().setUp()
        self.CirrusCfg = self.cci_env.CirrusCfg
        with open(os.path.join(TEST_DIRPATH, "actual_cirrus.yml")) as actual:
            self.actual_cirrus = yaml.safe_load(actual)

    def test_complex_cirrus_cfg(self):
        """Verify that CirrusCfg can be initialized from a complex .cirrus.yml."""
        with open(os.path.join(TEST_DIRPATH, "expected_cirrus.yml")) as expected:
            expected_cirrus = yaml.safe_load(expected)
        actual_cfg = self.CirrusCfg(self.actual_cirrus)
        self.assertSetEqual(set(actual_cfg.tasks.keys()),
                            set(expected_cirrus["tasks"].keys()))

    def test_complex_type_image(self):
        """Verify that CirrusCfg initializes with expected image types and values."""
        with open(os.path.join(TEST_DIRPATH, "expected_ti.yml")) as expected:
            expected_ti = yaml.safe_load(expected)
        actual_cfg = self.CirrusCfg(self.actual_cirrus)
        self.assertEqual(len(actual_cfg.tasks), len(expected_ti))
        actual_ti = {k: [v["inst_type"], v["inst_image"]]
                     for (k, v) in actual_cfg.tasks.items()}
        self.maxDiff = None  # show the full diff
        self.assertDictEqual(actual_ti, expected_ti)


if __name__ == "__main__":
    unittest.main()
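For orientation only (not part of the diff): because the module ends with the `unittest.main()` guard, a fixture file like this is normally executed directly. A minimal sketch, where the directory and filename are assumptions rather than something the diff states:

```bash
# Run the unit tests in place; -v lists each test method as it runs.
# The path and filename here are hypothetical.
cd cirrus-ci_env/test
python3 test_cirrus-ci_env.py -v
```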
@@ -0,0 +1,21 @@
#!/bin/bash

# Load standardized test harness
SCRIPT_DIRPATH=$(dirname "${BASH_SOURCE[0]}")
source $SCRIPT_DIRPATH/testlib.sh || exit 1

# Must go through the top-level install script that chains to ../.install.sh
TEST_DIR=$(realpath "$SCRIPT_DIRPATH/../")
INSTALL_SCRIPT=$(realpath "$TEST_DIR/../bin/install_automation.sh")
TEMPDIR=$(mktemp -p "" -d "tmpdir_cirrus-ci_env_XXXXX")

test_cmd "Verify cirrus-ci_env can be installed under $TEMPDIR" \
    0 'Installation complete for.+cirrus-ci_env' \
    env INSTALL_PREFIX=$TEMPDIR $INSTALL_SCRIPT 0.0.0 cirrus-ci_env

test_cmd "Verify executing cirrus-ci_env.py gives 'usage' error message" \
    2 'cirrus-ci_env.py: error: the following arguments are required:' \
    $TEMPDIR/automation/bin/cirrus-ci_env.py

trap "rm -rf $TEMPDIR" EXIT
exit_with_status
@@ -0,0 +1,50 @@
#!/bin/bash

# Load standardized test harness
SCRIPT_DIRPATH=$(dirname "${BASH_SOURCE[0]}")
source ${SCRIPT_DIRPATH}/testlib.sh || exit 1

TEST_DIR=$(realpath "$SCRIPT_DIRPATH/../")
SUBJ_FILEPATH="$TEST_DIR/${SUBJ_FILENAME%.sh}.py"

test_cmd "Verify no options results in help and an error-exit" \
    2 "cirrus-ci_env.py: error: the following arguments are required:" \
    $SUBJ_FILEPATH

test_cmd "Verify missing/invalid filename results in help and an error-exit" \
    2 "No such file or directory" \
    $SUBJ_FILEPATH /path/to/not/existing/file.yml

test_cmd "Verify missing mode-option results in help message and an error-exit" \
    2 "error: one of the arguments --list --envs --inst is required" \
    $SUBJ_FILEPATH $SCRIPT_DIRPATH/actual_cirrus.yml

test_cmd "Verify valid-YAML w/o tasks results in help message and an error-exit" \
    1 "ERROR: No Cirrus-CI tasks found in" \
    $SUBJ_FILEPATH --list $SCRIPT_DIRPATH/expected_cirrus.yml

CIRRUS=$SCRIPT_DIRPATH/actual_cirrus.yml
test_cmd "Verify invalid task name results in help message and an error-exit" \
    1 "ERROR: Unknown task name 'foobarbaz' from" \
    $SUBJ_FILEPATH --env foobarbaz $CIRRUS

TASK_NAMES=$(<"$SCRIPT_DIRPATH/actual_task_names.txt")
echo "$TASK_NAMES" | while read LINE; do
    test_cmd "Verify task '$LINE' appears in task-listing output" \
        0 "$LINE" \
        $SUBJ_FILEPATH --list $CIRRUS
done

test_cmd "Verify inherited instance image with env. var. reference is rendered" \
    0 "container quay.io/libpod/fedora_podman:c6524344056676352" \
    $SUBJ_FILEPATH --inst 'Ext. services' $CIRRUS

test_cmd "Verify DISTRO_NV env. var renders correctly from test task" \
    0 'DISTRO_NV="fedora-33"' \
    $SUBJ_FILEPATH --env 'int podman fedora-33 root container' $CIRRUS

test_cmd "Verify VM_IMAGE_NAME env. var renders correctly from test task" \
    0 'VM_IMAGE_NAME="fedora-c6524344056676352"' \
    $SUBJ_FILEPATH --env 'int podman fedora-33 root container' $CIRRUS

exit_with_status
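The three mode-options exercised above map directly onto the script's command line. A quick sketch of manual usage, with file paths illustrative only (the flags `--list`, `--envs`, and `--inst` are confirmed by the test expectations above):

```bash
# List every task name defined in a .cirrus.yml
./cirrus-ci_env.py --list .cirrus.yml

# Print the fully-rendered env. vars for a single task
./cirrus-ci_env.py --envs 'int podman fedora-33 root container' .cirrus.yml

# Show the instance type and image a task would run on
./cirrus-ci_env.py --inst 'Ext. services' .cirrus.yml
```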
@@ -0,0 +1 @@
../../common/test/testlib.sh
@@ -1,8 +1,11 @@
+#!/bin/bash
+
 # Installs cirrus-ci_retrospective system-wide. NOT intended to be used directly
 # by humans, should only be used indirectly by running
 # ../bin/install_automation.sh <ver> cirrus-ci_retrospective
 
+set -eo pipefail
+
 source "$AUTOMATION_LIB_PATH/anchors.sh"
 source "$AUTOMATION_LIB_PATH/console_output.sh"
 
@@ -1,6 +1,6 @@
 FROM registry.fedoraproject.org/fedora-minimal:latest
 RUN microdnf update -y && \
-    microdnf install -y findutils jq git curl && \
+    microdnf install -y findutils jq git curl python3 && \
     microdnf clean all && \
     rm -rf /var/cache/dnf
 # Assume build is for development/manual testing purposes by default (automation should override with fixed version)
@@ -25,4 +25,4 @@ ENV AUTOMATION_LIB_PATH="" \
 # Optional (recommended) environment variables
 ENV OUTPUT_JSON_FILE=""
 WORKDIR /root
-ENTRYPOINT ["/bin/bash", "-c", "source /etc/profile && exec /usr/share/automation/bin/cirrus-ci_retrospective.sh"]
+ENTRYPOINT ["/bin/bash", "-c", "source /etc/automation_environment && exec /usr/share/automation/bin/cirrus-ci_retrospective.sh"]
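Useful context for the ENTRYPOINT change: the container now bootstraps from /etc/automation_environment at start rather than the login profile. A hedged sketch of building and running the image, where the image tag and mounted paths are assumptions:

```bash
# Build the image from this Dockerfile
podman build -t cirrus-ci_retrospective .

# Run it roughly the way a GitHub Actions job would, providing the
# event payload and token the retrospective scripts expect.
podman run --rm \
    -e GITHUB_TOKEN="$GITHUB_TOKEN" \
    -e GITHUB_EVENT_PATH=/event.json \
    -v "$PWD/event.json:/event.json:ro" \
    cirrus-ci_retrospective
```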
@@ -13,7 +13,7 @@ to tests passing on a tagged commit.
 
 # Example Github Action Workflow
 
-On the master (default) branch of a repository (previously setup and running
+On the 'main' (default) branch of a repository (previously setup and running
 tasks in Cirrus-CI), add the following file:
 
 `.github/workflows/cirrus-ci_retrospective.yml`
@@ -38,7 +38,7 @@ jobs:
 
 ## Dependencies:
 
-In addition to the basic `common` requirements (see [top-level README.md](../master/README.md))
+In addition to the basic `common` requirements (see [top-level README.md](../README.md))
 the following system packages (or their equivalents) are needed:
 
 * curl
@@ -66,7 +66,7 @@ the following system packages (or their equivalents) are needed:
 ## Warning
 
 Due to security concerns, Github Actions only supports execution vs check_suite events
-from workflows already committed on the master branch. This makes it difficult to
+from workflows already committed on the 'main' branch. This makes it difficult to
 test implementations, since they will not execute until merged.
 
 However, the output JSON does provide all the necessary details to re-create, then possibly
@@ -123,12 +123,12 @@ containing multiple `tasks`.
 ```
 
 *Important note about manual tasks:* Manually triggering an independent task
-***will not*** result in a new `check_suite`. Therefor, the cirrus-ci_retrospective
+***will not*** result in a new `check_suite`. Therefore, the cirrus-ci_retrospective
 action will not execute again, irrespective of pass, fail or any other manual task status.
 Also, if any task in Cirrus-CI is dependent on a manual task, the build itself will not
 conclude until the manual task is triggered and completes (pass, fail, or other).
 
-## After merging pull request 34 into master branch (merge commit added)
+## After merging pull request 34 into main branch (merge commit added)
 
 ```json
 {
@@ -136,7 +136,7 @@ conclude until the manual task is triggered and completes (pass, fail, or other)
     "build": {
         "id": "foobarbaz"
         "changeIdInRepo": "232bae5d8ffb6082393e7543e4e53f978152f98a",
-        "branch": "master",
+        "branch": "main",
         "pullRequest": null,
         ...cut...
     }
@@ -169,6 +169,6 @@ Given a "conclusion" task name in Cirrus-CI (e.g. `cirrus-ci/test_success`):
 `'.[] | select(.name == "cirrus-ci/test_success") | .build.pullRequest'`
 
 * Obtain the HEAD commit ID used by Cirrus-CI for the build (always available)
-'.[] | select(.name == "cirrus-ci/test_success") | .build.changeIdInRepo'
+`'.[] | select(.name == "cirrus-ci/test_success") | .build.changeIdInRepo'`
 
 * ...todo: add more
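The jq filters documented above run against the action's output JSON. A minimal sketch of how they compose in practice; the output filename is a hypothetical stand-in for whatever `OUTPUT_JSON_FILE` is set to:

```bash
# Extract the PR number and HEAD commit for the conclusion task.
# 'cirrus-ci_retrospective.json' is an assumed filename.
pr=$(jq --raw-output \
    '.[] | select(.name == "cirrus-ci/test_success") | .build.pullRequest' \
    cirrus-ci_retrospective.json)
sha=$(jq --raw-output \
    '.[] | select(.name == "cirrus-ci/test_success") | .build.changeIdInRepo' \
    cirrus-ci_retrospective.json)
echo "Retrospective for PR #$pr at commit $sha"
```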
@@ -64,7 +64,7 @@ curl_post() {
         die "Expecting non-empty data argument"
 
     [[ -n "$token" ]] || \
-        dbg "### Warning: \$GITHUB_TOKEN is empty, performing unauthenticated query" > /dev/stderr
+        dbg "### Warning: \$GITHUB_TOKEN is empty, performing unauthenticated query" >> /dev/stderr
     # Don't expose secrets on any command-line
     local headers_tmpf
     local headers_tmpf=$(tmpfile headers)
@@ -74,14 +74,14 @@ content-type: application/json
 ${token:+authorization: Bearer $token}
 EOF
 
-    # Avoid needing to pass large strings on te command-line
+    # Avoid needing to pass large strings on the command-line
     local data_tmpf=$(tmpfile data)
    echo "$data" > "$data_tmpf"
 
     local curl_cmd="$CURL --silent --request POST --url $url --header @$headers_tmpf --data @$data_tmpf"
     dbg "### Executing '$curl_cmd'"
     local ret="0"
-    $curl_cmd > /dev/stdout || ret=$?
+    $curl_cmd >> /dev/stdout || ret=$?
 
     # Don't leave secrets lying around in files
     rm -f "$headers_tmpf" "$data_tmpf" &> /dev/null
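The point of the temp files above is to keep the bearer token and payload out of `ps`-visible argv. A standalone sketch of the same pattern, where the endpoint and query are illustrative (and note `--header @file` requires curl 7.55+):

```bash
#!/bin/bash
# Pass secrets to curl via files rather than the command line.
headers_tmpf=$(mktemp)
cat > "$headers_tmpf" <<EOF
accept: application/json
content-type: application/json
${GITHUB_TOKEN:+authorization: Bearer $GITHUB_TOKEN}
EOF

data_tmpf=$(mktemp)
echo '{"query": "{ viewer { login } }"}' > "$data_tmpf"

curl --silent --request POST \
    --url https://api.github.com/graphql \
    --header @"$headers_tmpf" --data @"$data_tmpf"

# Don't leave the token lying around afterward
rm -f "$headers_tmpf" "$data_tmpf"
```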
@@ -99,9 +99,9 @@ filter_json() {
     dbg "### Validating JSON in '$json_file'"
     # Confirm input json is valid and make filter problems easier to debug (below)
     local tmp_json_file=$(tmpfile json)
-    if ! jq . < "$json_file" > "$tmp_json_file"; then
+    if ! jq -e . < "$json_file" > "$tmp_json_file"; then
         rm -f "$tmp_json_file"
-        # JQ has alrady shown an error message
+        # JQ has already shown an error message
         die "Error from jq relating to JSON: $(cat $json_file)"
     else
         dbg "### JSON found to be valid"
@@ -111,7 +111,7 @@ filter_json() {
 
     dbg "### Applying filter '$filter'"
     if ! jq --indent 4 "$filter" < "$json_file" > "$tmp_json_file"; then
-        # JQ has alrady shown an error message
+        # JQ has already shown an error message
         rm -f "$tmp_json_file"
         die "Error from jq relating to JSON: $(cat $json_file)"
     fi
@@ -147,11 +147,6 @@ url_query_filter_test() {
     [[ "$ret" -eq "0" ]] || \
         die "Curl command exited with non-zero code: $ret"
 
-    if grep -q "error" "$curl_outputf"; then
-        # Barely passable attempt to catch GraphQL query errors
-        die "Found the word 'error' in curl output: $(cat $curl_outputf)"
-    fi
-
     # Validates both JSON and filter, updates $curl_outputf
     filter_json "$filter" "$curl_outputf"
     if [[ -n "$test_args" ]]; then
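The switch to `jq -e` matters because, with a successfully parsed input, plain `jq` exits 0 even when the filter's result is null or false; `-e` folds that result into the exit status. A quick standalone illustration:

```bash
echo '{"a": 1}' | jq -e '.a'        # prints 1, exit status 0
echo '{"a": 1}' | jq -e '.missing'  # prints null, exit status 1
echo 'not json' | jq -e '.'         # parse error, non-zero exit status
```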
@@ -12,7 +12,7 @@ test_cmd "Verify cirrus-ci_retrospective can be installed under $TEMPDIR" \
     env INSTALL_PREFIX=$TEMPDIR $INSTALL_SCRIPT 0.0.0 github cirrus-ci_retrospective
 
 test_cmd "Verify executing cirrus-ci_retrospective.sh gives 'Expecting' error message" \
-    2 '::error::.+Expecting' \
+    2 '::error.+Expecting' \
     env AUTOMATION_LIB_PATH=$TEMPDIR/automation/lib $TEMPDIR/automation/bin/cirrus-ci_retrospective.sh
 
 trap "rm -rf $TEMPDIR" EXIT
@@ -45,7 +45,7 @@ for required_var in ${req_env_vars[@]}; do
     export $required_var="$invalid_value"
     test_cmd \
         "Verify execution w/ \$$required_var='$invalid_value' (instead of '$valid_value') fails with helpful error message." \
-        2 "::error::.+\\\$$required_var.+'$invalid_value'" \
+        2 "::error.+\\\$$required_var.+'$invalid_value'" \
         $SUBJ_FILEPATH
     export $required_var="$valid_value"
 done
@@ -61,21 +61,21 @@ EOF
 export GITHUB_EVENT_PATH=$MOCK_EVENT_JSON_FILEPATH
 
 test_cmd "Verify expected error when fed empty mock event JSON file" \
-    1 "::error::.+check_suite.+key" \
+    1 "::error.+check_suite.+key" \
     $SUBJ_FILEPATH
 
 cat << EOF > "$MOCK_EVENT_JSON_FILEPATH"
 {"check_suite":{}}
 EOF
 test_cmd "Verify expected error when fed invalid check_suite value in mock event JSON file" \
-    1 "::error::.+check_suite.+type.+null" \
+    1 "::error.+check_suite.+type.+null" \
     $SUBJ_FILEPATH
 
 cat << EOF > "$MOCK_EVENT_JSON_FILEPATH"
 {"check_suite": {}, "action": "foobar"}
 EOF
 test_cmd "Verify error and message containing incorrect value from mock event JSON file" \
-    1 "::error::.+check_suite.+foobar" \
+    1 "::error.+check_suite.+foobar" \
     $SUBJ_FILEPATH
 
 cat << EOF > "$MOCK_EVENT_JSON_FILEPATH"
@@ -89,7 +89,7 @@ cat << EOF > "$MOCK_EVENT_JSON_FILEPATH"
 {"check_suite": {"app":{"id":null}}, "action": "completed"}
 EOF
 test_cmd "Verify expected error when 'app' id is wrong type in mock event JSON file" \
-    1 "::error::.+integer.+null" \
+    1 "::error.+integer.+null" \
     $SUBJ_FILEPATH
 
 # Must always happen last
@@ -109,7 +109,7 @@ test_cmd \
     '^4 $' \
     cat "$TEST_JSON_FILE"
 
-# Makes checking temp-files writen by curl_post() easier
+# Makes checking temp-files written by curl_post() easier
 TMPDIR=$(mktemp -d -p "$_TMPDIR" "tmpdir_curl_XXXXX")
 # Set up a mock for argument checking
 _CURL="$CURL"
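The relaxed `::error` patterns above track the GitHub Actions workflow-command syntax, where optional parameters may appear between `::error` and the closing `::`. For example (standard Actions syntax, not repo code):

```bash
# Both are valid GitHub Actions error annotations; only the second
# matches the old '::error::' anchor, so the tests now match '::error'.
echo "::error file=ci.sh,line=12::Expecting non-empty value"
echo "::error::Expecting non-empty value"
```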
@@ -29,13 +29,34 @@ our $Default_Yml = '.cirrus.yml';
 # Try to leave one or two greens at the end: these will be used
 # for terminal nodes (e.g. "success")
 our @Colors = qw(
-    blue orange red darkgoldenrod firebrick1 orangered4
+    orange red darkgoldenrod firebrick1 orangered4
     darkturquoise deeppink deepskyblue3 coral dodgerblue
     bisque2 indigo darkorchid1 palevioletred2 slateblue4
     cornsilk4 deepskyblue4 navajowhite2
     slateblue1 yellow4 brown chartreuse seagreen3 darkgreen
 );
 
+# Color overrides: use sys/int/etc colors from github-ci-highlight Greasemonkey
+#
+#   https://github.com/edsantiago/greasemonkey/tree/master/github-ci-highlight
+#
+# No sane way to fetch colors automatically, so, just duplicate.
+our %Color_Override = (
+    #                     FG  BG
+    apiv2              => 'fff:c0c',
+    bud                => '000:fc0',
+    compose            => '660:fff',
+    integration        => '000:960',
+    system             => '000:cf9',
+    unit               => '000:f99',
+    upgrade            => 'f0c:fff',
+    '(?<!image.)build' => '00f:fff',
+    'image.build'      => 'f85:fff',
+    validate           => '0c0:fff',
+    machine            => '330:0ff',
+    success            => '000:0f0',
+);
+
 # END user-customizable section
 ###############################################################################
@@ -176,11 +197,12 @@ sub write_img {
 
     # Annotate: add signature line at lower left
     # FIXME: include git repo info?
-    if (grep { -x "$_/convert" } split(":", $ENV{PATH})) {
+    if (grep { -x "$_/magick" } split(":", $ENV{PATH})) {
         unlink $img_out_tmp;
         my $signature = strftime("Generated %Y-%m-%dT%H:%M:%S%z by $ME v$VERSION", localtime);
         my @cmd = (
-            "convert",
+            "magick",
+            $img_out,
             '-family' => 'Courier',
             '-pointsize' => '12',
             # '-style' => 'Normal',   # Argh! This gives us Bold!?
@@ -188,7 +210,7 @@ sub write_img {
             '-fill' => '#000',
             '-gravity' => 'SouthWest',
             "-annotate", "+5+5", $signature,
-            "$img_out" => "$img_out_tmp"
+            $img_out_tmp
         );
         if (system(@cmd) == 0) {
             rename $img_out_tmp => $img_out;
@@ -199,6 +221,11 @@ sub write_img {
     }
 
     chmod 0444 => $img_out;
+
+    # Special case for SVG: we can make a dynamic map with actions on hover
+    if ($format eq 'svg') {
+        make_dynamic_svg($img_out);
+    }
 }
 
@@ -223,6 +250,111 @@ sub write_gv {
         or die "$ME: Could not rename $tmpfile: $!\n";
 }
 
+
+######################
+#  make_dynamic_svg  #  Add :hover elements
+######################
+sub make_dynamic_svg {
+    my $svg_path = shift;
+
+    my $svg_tmp = "$svg_path.tmp.$$";
+    open my $fh_in, '<', $svg_path
+        or die "$ME: cannot read $svg_path: $!\n";
+
+    my %skip;
+    my %only_if;
+    my %node_is_onlyif;
+
+    my $current_node = '';
+    my $current_title = '';
+
+    # Pass 1: read the original SVG, getting a sense for all nodes
+    while (my $line = <$fh_in>) {
+        # Group, generated by GraphViz itself: node1, node2, node3, ...
+        if ($line =~ m!<g id="(.*?)"!) {
+            $current_node = $1;
+        }
+        # Cirrus task name, e.g. validate, system_test
+        elsif ($line =~ m!<title>(.*)</title>!) {
+            # Convert 'docker-py_test' to just 'dockerpy_test'
+            ($current_title = $1) =~ s/&.*?;//;
+        }
+        # This will be in a <text>...</text> element
+        elsif ($line =~ /\[SKIP:\s+(.*?)\]/) {
+            $skip{lc $1}{$current_title} = 1;
+        }
+        # This too
+        elsif ($line =~ /\[only\s+if\s+(.*?)\]/) {
+            $node_is_onlyif{$current_node} = 1 unless lc($1) eq 'pr';
+            $only_if{lc $1}{$current_title} = 1;
+        }
+    }
+
+    # Pass 2: reread, and now write (filtering) to custom-tweaked svg
+    seek $fh_in, 0, 0;
+
+    open my $fh_out, '>', $svg_tmp
+        or die "$ME: Cannot create $svg_tmp: $!\n";
+    while (my $line = <$fh_in>) {
+        my $skip;
+        my $onlyif;
+
+        # e.g. the 'release' step in podman only runs on tag
+        if ($line =~ m!<g id="(.*?)"!) {
+            if ($node_is_onlyif{$1}) {
+                $line =~ s!>! opacity="0.2">!;
+            }
+        }
+        # Cirrus task name, e.g. validate, system_test
+        elsif ($line =~ m!<title>(.*)</title>!) {
+            ($current_title = $1) =~ s/&.*?;//;
+        }
+        elsif ($line =~ /\[SKIP:\s+(.*?)\]/) {
+            $skip = lc $1;
+            $line =~ s!<text !<text id="skip_${skip}_${current_title}" !;
+        }
+        elsif ($line =~ /\[only\s+if\s+(.*?)\]/) {
+            $onlyif = lc $1;
+            # FIXME: how to set default opacity for this block?
+            $line =~ s!<text !<text id="onlyif_${onlyif}_${current_title}" !;
+        }
+
+        print { $fh_out } $line;
+
+        # This is where we add value
+        if ($skip) {
+            for my $t (sort keys %{$skip{$skip}}) {
+                print { $fh_out } qq{<set attributeName="opacity" to="0.1" begin="skip_${skip}_${t}.mouseover" end="skip_${skip}_${t}.mouseout" />\n};
+            }
+            if ($only_if{$skip}) {
+                for my $t (sort keys %{$only_if{$skip}}) {
+                    print { $fh_out } qq{<set attributeName="opacity" to="0.1" begin="onlyif_${skip}_${t}.mouseover" end="onlyif_${skip}_${t}.mouseout" />\n};
+                }
+            }
+        }
+        if ($onlyif && $onlyif ne 'pr') {
+            for my $t (sort keys %{$only_if{$onlyif}}) {
+                print { $fh_out } qq{<set attributeName="opacity" to="1.0" begin="onlyif_${onlyif}_${t}.mouseover" end="onlyif_${onlyif}_${t}.mouseout" />\n};
+            }
+            if ($skip{$onlyif}) {
+                for my $t (sort keys %{$skip{$onlyif}}) {
+                    print { $fh_out } qq{<set attributeName="opacity" to="1.0" begin="skip_${onlyif}_${t}.mouseover" end="skip_${onlyif}_${t}.mouseout" />\n};
+                }
+            }
+        }
+    }
+    close $fh_out
+        or die "$ME: Error writing $fh_out: $!\n";
+    close $fh_in;
+
+    # Done. Replace original SVG.
+    chmod 0444 => $svg_tmp
+        or die "$ME: Cannot chmod 0444 $svg_tmp: $!\n";
+    rename $svg_tmp => $svg_path
+        or die "$ME: Could not rename $svg_tmp -> $svg_path: $!\n";
+}
+
 
 ###############################################################################
 
 package Task;
@@ -293,18 +425,44 @@ sub _size {
 }
 
 ##############
-#  _by_size  #  sort helper, for putting big nodes at bottom
+#  _by_type  #  sort helper, for clustering int/sys/machine tests
 ##############
-sub _by_size {
-    _size($a) <=> _size($b) ||
-        $a->{name} cmp $b->{name};
+sub _by_type {
+    my $ax = $a->{name};
+    my $bx = $b->{name};
+
+    # The big test types, in the order we want to show them
+    my @types = qw(integration system bud machine);
+    my %type_order = map { $types[$_] => $_ } (0..$#types);
+    my $type_re = join('|', @types);
+
+    if ($ax =~ /($type_re)/) {
+        my $a_type = $1;
+        if ($bx =~ /($type_re)/) {
+            my $b_type = $1;
+
+            return $type_order{$a_type} <=> $type_order{$b_type}
+                || $ax cmp $bx;
+        }
+        else {
+            # e.g., $b is "win installer", $a is in @types, $b < $a
+            return 1;
+        }
+    }
+    elsif ($bx =~ /($type_re)/) {
+        # e.g., $a is "win installer", $b is in @types, $a < $b
+        return -1;
+    }
+
+    # Neither a nor b is in @types
+    $ax cmp $bx;
 }
 
 sub depended_on_by {
     my $self = shift;
 
     if (my $d = $self->{_depended_on_by}) {
-        my @d = sort _by_size map { $self->{_tasklist}->find($_) } @$d;
+        my @d = sort _by_type map { $self->{_tasklist}->find($_) } @$d;
         return @d;
     }
     return;
@@ -317,10 +475,20 @@ sub subtasks {
     my @subtasks;
     if (my $m = $self->{yml}{matrix}) {
         for my $item (@$m) {
-            my $name = $self->_expand_matrix_name( $item );
+            my $name = $self->_expand_name( $item );
             push @subtasks, "- " . $name . '\l';
         }
     }
+    elsif (my $name = $self->{yml}{name}) {
+        if ($name =~ /\$/) {
+            # A name with dollars, like "$TEST_FLAVOR $PODBIN $DISTRO_NV etc",
+            # is worth a box entry showing that expansion. This will be only
+            # one line (as opposed to one or more for matrix stanzas) but
+            # the expansion is still useful so reader can know what arch
+            # and OS this is running on.
+            push @subtasks, '= ' . $self->_expand_name( $name ) . '\l';
+        }
+    }
 
     return @subtasks;
 }
@@ -353,9 +521,13 @@ sub env_matrix {
 }
 
 
-sub _expand_matrix_name {
+##################
+#  _expand_name  #  Iteratively expand $FOO or ${FOO} or a matrix name
+##################
+sub _expand_name {
     my $self = shift;
-    my $matrix_item = shift;
+    my $item = shift;
+    my $name;
 
     # Environment: start with top-level env defined for entire yml file
     my %env;
@@ -368,18 +540,26 @@ sub _expand_name {
         %env = (%env, %$env);
     }
 
-    # ...then finally with env in the matrix
-    if (my $m_env = $matrix_item->{env}) {
-        %env = (%env, %$m_env);
+    # ...then finally, if this is a matrix item, with its env
+    if ((ref($item)||'') eq 'HASH') {
+        if (my $m_env = $item->{env}) {
+            %env = (%env, %$m_env);
+        }
+        $name = $item->{name};
     }
 
-    my $name = $matrix_item->{name} || $self->{yml}{name} || $self->name || '?';
+    $name //= $self->{yml}{name} || $self->name || '?';
 
-    # FIXME: need to clean this up!
-    $name =~ s/\$\{(.*?)\}/$env{$1} || "\$$1"/ge;
-    $name =~ s/\$([A-Z_]+)/$env{$1} || "\$$1"/ge;
-    $name =~ s/\$\{(.*?)\}/$env{$1} || "\$$1"/ge;   # and again with curlies
-    $name =~ s/\$([A-Z_]+)/$env{$1} || "\$$1"/ge;   # and again without
+    while ($name =~ /\$/) {
+        my $name_old = $name;
+
+        $name =~ s/\$\{(.*?)\}/$env{$1} || "\$$1"/ge;
+        $name =~ s/\$([A-Z_]+)/$env{$1} || "\$$1"/ge;
+
+        # Don't infinite-loop
+        last if $name_old eq $name;
+        print "$name_old -> $name\n" if $debug;
+    }
 
     return $name;
 }
@@ -558,14 +738,28 @@ sub _draw_boxes {
     my $node = $task->{name};
     return if $self->{_gv}{done}{$node}++;
 
-    # Terminal nodes: pop from the end of the color list (expect greens)
     my $color;
-    if (! $task->depended_on_by) {
-        $color = pop @{$self->{_gv}{colors}};
-    }
-    else {
-        $color = shift @{$self->{_gv}{colors}};
+    my $fill = '';
+    for my $term (sort keys %Color_Override) {
+        if ($node =~ /(^|_)${term}(_|$)/) {
+            my ($fg, $bg) = split ':', $Color_Override{$term};
+            $fg =~ s/(.)/${1}0/g;
+            $bg =~ s/(.)/${1}0/g;
+            $color = qq{"#$fg\"};
+            $fill = qq{ fillcolor="#$bg" style=filled};
+            last;
+        }
+    }
+
+    # Terminal nodes: pop from the end of the color list (expect greens)
+    if (! $color) {
+        if (! $task->depended_on_by) {
+            $color = pop @{$self->{_gv}{colors}};
+        }
+        else {
+            $color = shift @{$self->{_gv}{colors}};
+        }
     }
     if (! $color) {
         warn "$ME: Ran out of colors\n";
         $color = 'black';
@@ -584,12 +778,144 @@ sub _draw_boxes {
         $label .= join('', @env_matrix);
     }
 
-    $self->{_gv}{dot} .= " \"$node\" [shape=$shape style=bold color=$color fontcolor=$color";
-    $self->{_gv}{dot} .= " label=\"$node\\l\|$label\"" if $label;
+    # Special cases (all hardcoded) for conditional tasks.
+    if (my $only_if = $task->{yml}{only_if}) {
+        $shape = 'record';
+        $label .= '|' if $label;
+
+        # Collapse whitespace, and remove leading/trailing
+        $only_if =~ s/[\s\n]+/ /g;
+        $only_if =~ s/^\s+|\s+$//g;
+
+        # 2024-06-18 Paul CI skips
+        if ($only_if =~ m{\$CIRRUS_PR\s+==\s+''\s+.*\$CIRRUS_CHANGE_TITLE.*CI:ALL.*changesInclude.*test}) {
+            $label .= "[SKIP if not needed]";
+        }
+        # 2020-10 used in automation_images repo
+        elsif ($only_if eq q{$CIRRUS_PR != ''}) {
+            $label .= "[only if PR]";
+        }
+        # 2020-10 used in automation_images repo
+        elsif ($only_if eq q{$CIRRUS_PR == '' && $CIRRUS_CRON != ''}) {
+            $label .= "[only if cron]";
+        }
+        # 2022-09
+        elsif ($only_if eq q{$CIRRUS_PR != '' && $CIRRUS_CHANGE_TITLE =~ '.*CI:BUILD.*'}) {
+            $label .= "[only if PR + CI:BUILD]";
+        }
+        elsif ($only_if eq q{${CIRRUS_CRON} == 'main'}) {
+            $label .= "[only if cron on main]";
+        }
+        # 2022-09
+        elsif ($only_if eq q{$CIRRUS_CRON == 'multiarch'}) {
+            $label .= "[only if cron multiarch]";
+        }
+        elsif ($only_if eq q{$CIRRUS_CRON != 'multiarch'}) {
+            $label .= "[SKIP: cron multiarch]";
+        }
+        # used in podman
+        elsif ($only_if eq q{$CIRRUS_TAG != ''}) {
+            $label .= "[only if tag]";
+        }
+        # PR #13114
+        elsif ($only_if =~ /CIRRUS_CHANGE.*release.*bump/i) {
+            $label .= "[only on release PR]";
+        }
+        # swagger
+        elsif ($only_if =~ /CIRRUS_CHANGE_TITLE.*CI:BUILD.*CIRRUS_CRON.*multiarch/) {
+            $label .= "[SKIP: CI:BUILD or cron-multiarch]";
+        }
+        # buildah-bud rootless is only run in nightly treadmill
+        elsif ($only_if =~ /\$CIRRUS_CRON\s+==\s+'treadmill'/) {
+            $label .= "[only on cron treadmill]";
+        }
+        # "bench stuff" job: Only run on merge and never for cirrus-cron.
+        elsif ($only_if =~ /CIRRUS_BRANCH\s+==\s+'main'\s+&&\s+\$CIRRUS_CRON\s+==\s+''/) {
+            $label .= "[only on merge]";
+        }
+        elsif ($only_if =~ /CIRRUS_BRANCH\s+!=~\s+'v.*-rhel'\s+&&\s+\$CIRRUS_BASE_BRANCH\s+!=~\s+'v.*-rhel'/) {
+            $label .= "[only if no RHEL release]";
+        }
+        elsif ($only_if =~ /CIRRUS_CHANGE_TITLE.*CI:BUILD.*CIRRUS_CHANGE_TITLE.*CI:MACHINE/s) {
+            $label .= "[SKIP: CI:BUILD or CI:MACHINE]";
+        }
+        elsif ($only_if =~ /CIRRUS_CHANGE_TITLE\s+!=.*CI:MACHINE.*CIRRUS_BRANCH.*main.*CIRRUS_BASE_BRANCH.*main.*\)/s) {
+            $label .= "[only if: main]";
+        }
+        # automation_images
+        elsif ($only_if eq q{$CIRRUS_CRON == '' && $CIRRUS_BRANCH == $CIRRUS_DEFAULT_BRANCH}) {
+            $label .= "[only if DEFAULT_BRANCH and not cron]";
+        }
+        elsif ($only_if eq q{$CIRRUS_PR != '' && $CIRRUS_PR_LABELS !=~ ".*no_build-push.*"}) {
+            $label .= "[only if PR, but not no_build-push]";
+        }
+        elsif ($only_if eq q{$CIRRUS_CRON == 'lifecycle'}) {
+            $label .= "[only on cron=lifecycle]";
+        }
+        else {
+            warn "$ME: unexpected only_if: $only_if\n";
+            $label .= "[only if: $only_if]";
+        }
+    }
+
+    # Special case for manual (or other??) trigger type
+    my $trigger = '';
+    if (my $t = $task->{yml}{trigger_type}) {
+        $trigger = "\\l(TRIGGER: " . uc($t) . ")";
+    }
+
+    # Special cases (also hardcoded) for tasks that are skipped.
+    if (my $skip = $task->{yml}{skip}) {
+        $shape = 'record';
+        $label .= '|' if $label && $label !~ /SKIP/;
+
+        # Collapse whitespace, and remove leading/trailing
+        $skip =~ s/[\s\n]+/ /g;
+        $skip =~ s/^\s+|\s+$//g;
+
+        my @reasons;
+
+        # automation_images
+        if ($skip eq q{$CIRRUS_CHANGE_TITLE =~ '.*CI:DOCS.*' || $CIRRUS_CHANGE_TITLE =~ '.*CI:TOOLING.*'}) {
+            push @reasons, "CI:DOCS or CI:TOOLING";
+        }
+        elsif ($skip eq q{$CIRRUS_CHANGE_TITLE =~ '.*CI:DOCS.*'}) {
+            push @reasons, "CI:DOCS";
+        }
+        elsif ($skip eq '$CI == $CI') {
+            push @reasons, "DISABLED MANUALLY";
+        }
+        elsif ($skip) {
+            warn "$ME: unexpected skip '$skip'\n";
+        }
+
+        if (@reasons) {
+            $label .= join('', map { "[SKIP: $_]\\l" } @reasons);
+        }
+        else {
+            $label .= "[SKIPPABLE: $skip]";
+        }
+    }
+
+    $self->{_gv}{dot} .= " \"$node\" [shape=$shape style=bold color=$color$fill fontcolor=$color";
+    if ($label) {
+        (my $nodename = $node) =~ s/_/ /g;
+        $self->{_gv}{dot} .= " label=\"$nodename$trigger\\l\|$label\"";
+    }
     $self->{_gv}{dot} .= "]\n";
 
     for my $dep ($task->depended_on_by) {
-        $self->{_gv}{dot} .= " \"$node\" -> \"$dep->{name}\" [color=$color]\n";
+        my $c = $color;
+        # For custom-override boxes, when FG is black or very light, use
+        # background color for arrow.
+        if ($c =~ /000000/ || $c =~ /f.f.f./) {
+            if ($fill =~ /\"#([0-9a-f]{6})\"/) {
+                $c = qq{"#$1"};
+            }
+        }
+        $self->{_gv}{dot} .= " \"$node\" -> \"$dep->{name}\" [color=$c]\n";
         $self->_draw_boxes($dep);
     }
 }
@@ -641,7 +967,11 @@ sub _mergekeys
     foreach my $inherit (@$inherits)
     {
         $inherit = _mergekeys($inherit, $resolveStack);
-        %$ref = (%$inherit, %$ref);
+
+        # ** changed by esm **: shallow hash merge fails for
+        # remote_sys_aarch64 (as of 2022-11) because it just <<'s
+        # the entire local_sys_aarch64 including its env hash
+        deepmerge($ref, $inherit);
     }
     delete $ref->{'<<'};
 }
@@ -659,6 +989,79 @@ sub _mergekeys
     return $ref;
 }
 
+
+###############
+#  deepmerge  #  deep recursive merge for hashes; needed for cirrus matrices
+###############
+sub deepmerge {
+    my ($ref, $inherit) = @_;
+
+    for my $k (keys %$inherit) {
+        my $r_ref = ref($ref->{$k}) || '';
+        my $i_ref = ref($inherit->{$k}) || '';
+
+        if ($i_ref eq 'HASH') {
+            # Two hashes
+            deepmerge($ref->{$k}, $inherit->{$k});
+        }
+        elsif ($i_ref eq 'ARRAY') {
+            # Two arrays; this is how .cirrus.yml does matrix env settings
+            $ref->{$k} //= [];
+            for my $element (@{$inherit->{$k}}) {
+                my $e_ref = ref($element) || '';
+                if ($e_ref eq 'HASH') {
+                    # The only situation we handle is a hashref with one
+                    # key named 'env', whose value is a hash. If that ever
+                    # changes, deal with it then.
+                    my $e_formatted = format_env($element);
+
+                    my $found;
+                    for my $in_k (@{$ref->{$k}}) {
+                        $found ||= (format_env($in_k) eq $e_formatted);
+                    }
+                    push @{$ref->{$k}}, $element unless $found;
+                }
+                elsif ($e_ref eq 'ARRAY') {
+                    die "FIXME, deepmerge cannot handle arrays of arrays";
+                }
+                elsif (! grep { $_ eq $element } @{$ref->{$k}}) {
+                    # ref is an array, but element is a scalar
+                    push @{$ref->{$k}}, $element;
+                }
+            }
+        }
+        else {
+            # i is scalar
+            # 2023-04-23 do not override existing values! Anchors are used
+            # only for filling in defaults. Anything explicitly set in
+            # the YAML block is what we really want.
+            $ref->{$k} //= $inherit->{$k};
+        }
+    }
+}
+
+################
+#  format_env  #  Return an easily-compared string based on a hashref
+################
+sub format_env {
+    my $href = shift;
+
+    # href must be: { env => { foo => "bar", ... } }
+    ref($href) eq 'HASH'
+        or die "$ME: Internal error: format_env(): arg is not a hash";
+    exists $href->{env}
+        or die "$ME: Internal error: format_env(): arg does not have 'env' key";
+    ref($href->{env}) eq 'HASH'
+        or die "$ME: Internal error: format_env(): arg->{env} is not a hash";
+    keys(%{$href}) == 1
+        or die "$ME: Internal error: format_env(): %{arg} has too many keys";
+
+    join("--", map {
+        sprintf("%s=%s", $_, $href->{env}{$_})
+    } sort keys %{$href->{env}});
+}
+
+
 # END omg kludge for dealing with anchors
 ###############################################################################
@@ -90,14 +90,14 @@ end_task:
     - "middle_2"
 >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
 "real_name_of_initial" [shape=ellipse style=bold color=a fontcolor=a]
+"real_name_of_initial" -> "end" [color=a]
+"end" [shape=ellipse style=bold color=z fontcolor=z]
 "real_name_of_initial" -> "middle_1" [color=a]
 "middle_1" [shape=ellipse style=bold color=b fontcolor=b]
 "middle_1" -> "end" [color=b]
-"end" [shape=ellipse style=bold color=z fontcolor=z]
 "real_name_of_initial" -> "middle_2" [color=a]
 "middle_2" [shape=ellipse style=bold color=c fontcolor=c]
 "middle_2" -> "end" [color=c]
-"real_name_of_initial" -> "end" [color=a]
 
 
 <<<<<<<<<<<<<<<<<< env interpolation 1
 env:
@@ -510,41 +510,41 @@ success_task:
 
 >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
 "automation" [shape=ellipse style=bold color=a fontcolor=a]
-"automation" -> "success" [color=a]
-"success" [shape=ellipse style=bold color=z fontcolor=z]
 "automation" -> "build" [color=a]
-"build" [shape=record style=bold color=b fontcolor=b label="build\l|- Build for fedora-32\l- Build for fedora-31\l- Build for ubuntu-20\l- Build for ubuntu-19\l"]
-"build" -> "bindings" [color=b]
-"bindings" [shape=ellipse style=bold color=c fontcolor=c]
-"bindings" -> "success" [color=c]
-"build" -> "docker-py_test" [color=b]
-"docker-py_test" [shape=ellipse style=bold color=d fontcolor=d]
-"docker-py_test" -> "success" [color=d]
-"build" -> "endpoint" [color=b]
-"endpoint" [shape=ellipse style=bold color=e fontcolor=e]
-"endpoint" -> "success" [color=e]
-"build" -> "osx_cross" [color=b]
-"osx_cross" [shape=ellipse style=bold color=f fontcolor=f]
-"osx_cross" -> "success" [color=f]
-"build" -> "swagger" [color=b]
-"swagger" [shape=ellipse style=bold color=g fontcolor=g]
-"swagger" -> "success" [color=g]
-"build" -> "validate" [color=b]
-"validate" [shape=ellipse style=bold color=h fontcolor=h]
-"validate" -> "success" [color=h]
-"build" -> "vendor" [color=b]
-"vendor" [shape=ellipse style=bold color=i fontcolor=i]
-"vendor" -> "success" [color=i]
-"build" -> "unit_test" [color=b]
-"unit_test" [shape=record style=bold color=j fontcolor=j label="unit_test\l|- Unit tests on fedora-32\l- Unit tests on fedora-31\l- Unit tests on ubuntu-20\l- Unit tests on ubuntu-19\l"]
-"unit_test" -> "success" [color=j]
-"build" -> "alt_build" [color=b]
-"alt_build" [shape=record style=bold color=k fontcolor=k label="alt_build\l|- Build Each Commit\l- Windows Cross\l- Build Without CGO\l- Build varlink API\l- Static build\l- Test build RPM\l"]
-"alt_build" -> "success" [color=k]
-"build" -> "success" [color=b]
-"ext_svc_check" [shape=ellipse style=bold color=l fontcolor=l]
-"ext_svc_check" -> "success" [color=l]
-"ext_svc_check" -> "build" [color=l]
-"smoke" [shape=ellipse style=bold color=m fontcolor=m]
-"smoke" -> "success" [color=m]
-"smoke" -> "build" [color=m]
+"build" [shape=record style=bold color="#0000f0" fillcolor="#f0f0f0" style=filled fontcolor="#0000f0" label="build\l|- Build for fedora-32\l- Build for fedora-31\l- Build for ubuntu-20\l- Build for ubuntu-19\l"]
+"build" -> "alt_build" [color="#0000f0"]
+"alt_build" [shape=record style=bold color="#0000f0" fillcolor="#f0f0f0" style=filled fontcolor="#0000f0" label="alt build\l|- Build Each Commit\l- Windows Cross\l- Build Without CGO\l- Build varlink API\l- Static build\l- Test build RPM\l"]
+"alt_build" -> "success" [color="#0000f0"]
+"success" [shape=ellipse style=bold color="#000000" fillcolor="#00f000" style=filled fontcolor="#000000"]
+"build" -> "bindings" [color="#0000f0"]
+"bindings" [shape=ellipse style=bold color=b fontcolor=b]
+"bindings" -> "success" [color=b]
+"build" -> "docker-py_test" [color="#0000f0"]
+"docker-py_test" [shape=ellipse style=bold color=c fontcolor=c]
+"docker-py_test" -> "success" [color=c]
+"build" -> "endpoint" [color="#0000f0"]
+"endpoint" [shape=ellipse style=bold color=d fontcolor=d]
+"endpoint" -> "success" [color=d]
+"build" -> "osx_cross" [color="#0000f0"]
+"osx_cross" [shape=ellipse style=bold color=e fontcolor=e]
+"osx_cross" -> "success" [color=e]
+"build" -> "success" [color="#0000f0"]
+"build" -> "swagger" [color="#0000f0"]
+"swagger" [shape=ellipse style=bold color=f fontcolor=f]
+"swagger" -> "success" [color=f]
+"build" -> "unit_test" [color="#0000f0"]
+"unit_test" [shape=record style=bold color="#000000" fillcolor="#f09090" style=filled fontcolor="#000000" label="unit test\l|- Unit tests on fedora-32\l- Unit tests on fedora-31\l- Unit tests on ubuntu-20\l- Unit tests on ubuntu-19\l"]
+"unit_test" -> "success" [color="#f09090"]
+"build" -> "validate" [color="#0000f0"]
+"validate" [shape=record style=bold color="#00c000" fillcolor="#f0f0f0" style=filled fontcolor="#00c000" label="validate\l|= Validate fedora-32 Build\l"]
+"validate" -> "success" [color="#00c000"]
+"build" -> "vendor" [color="#0000f0"]
+"vendor" [shape=ellipse style=bold color=g fontcolor=g]
+"vendor" -> "success" [color=g]
+"automation" -> "success" [color=a]
+"ext_svc_check" [shape=ellipse style=bold color=h fontcolor=h]
+"ext_svc_check" -> "build" [color=h]
+"ext_svc_check" -> "success" [color=h]
+"smoke" [shape=ellipse style=bold color=i fontcolor=i]
+"smoke" -> "build" [color=i]
+"smoke" -> "success" [color=i]
@@ -10,7 +10,7 @@ set -eo pipefail
 SCRIPT_BASEDIR="$(basename $0)"
 
 badusage() {
-    echo "Incorrect usage: $SCRIPT_BASEDIR) <command> [options]" > /dev/stderr
+    echo "Incorrect usage: $SCRIPT_BASEDIR) <command> [options]" >> /dev/stderr
     echo "ERROR: $1"
     exit 121
 }
@@ -54,7 +54,7 @@ done
     usage "The number of retry attempts must be greater than 1, not '$attempts'"
 
 ((sleep_ms>10)) || \
-    usage "The number of miliseconds must be greater than 10, not '$sleep_ms'"
+    usage "The number of milliseconds must be greater than 10, not '$sleep_ms'"
 
 for exit_code in "${exit_codes[@]}"; do
     if ((exit_code<0)) || ((exit_code>254)); then
@@ -28,7 +28,7 @@ automation_version() {
     if [[ -n "$_avcache" ]]; then
         echo "$_avcache"
     else
-        echo "Error determining version number" > /dev/stderr
+        echo "Error determining version number" >> /dev/stderr
         exit 1
     fi
 }
@@ -7,7 +7,7 @@ AUTOMATION_LIB_PATH="${AUTOMATION_LIB_PATH:-$(dirname ${BASH_SOURCE[0]})}"
 
 # Filename list must be hard-coded
 # When installed, other files may be present in lib directory
-COMMON_LIBS="anchors.sh defaults.sh utils.sh console_output.sh"
+COMMON_LIBS="anchors.sh defaults.sh platform.sh utils.sh console_output.sh"
 for filename in $COMMON_LIBS; do
     source $(dirname "${BASH_SOURCE[0]}")/$filename
 done
@@ -3,6 +3,7 @@
 # A Library of contextual console output-related operations.
 # Intended for use by other scripts, not to be executed directly.
 
+# shellcheck source=common/lib/defaults.sh
 source $(dirname $(realpath "${BASH_SOURCE[0]}"))/defaults.sh
 
 # helper, not intended for use outside this file
@@ -10,10 +11,11 @@ _rel_path() {
     if [[ -z "$1" ]]; then
         echo "<stdin>"
     else
-        local abs_path=$(realpath "$1")
-        local rel_path=$(realpath --relative-to=. $abs_path)
-        local abs_path_len=${#abs_path}
-        local rel_path_len=${#rel_path}
+        local abs_path rel_path abs_path_len rel_path_len
+        abs_path=$(realpath "$1")
+        rel_path=$(realpath --relative-to=. $abs_path)
+        abs_path_len=${#abs_path}
+        rel_path_len=${#rel_path}
         if ((abs_path_len <= rel_path_len)); then
             echo "$abs_path"
         else
@@ -24,9 +26,10 @@ _rel_path() {
 
 # helper, not intended for use outside this file
 _ctx() {
+    local shortest_source_path grandparent_func
     # Caller's caller details
-    local shortest_source_path=$(_rel_path "${BASH_SOURCE[3]}")
-    local grandparent_func="${FUNCNAME[2]}"
+    shortest_source_path=$(_rel_path "${BASH_SOURCE[3]}")
+    grandparent_func="${FUNCNAME[2]}"
     [[ -n "$grandparent_func" ]] || \
         grandparent_func="main"
     echo "$shortest_source_path:${BASH_LINENO[2]} in ${FUNCNAME[3]}()"
@@ -34,9 +37,10 @@ _ctx() {
 
 # helper, not intended for use outside this file.
 _fmt_ctx() {
-    local stars="************************************************"
-    local prefix="${1:-no prefix given}"
-    local message="${2:-no message given}"
+    local stars prefix message
+    stars="************************************************"
+    prefix="${1:-no prefix given}"
+    message="${2:-no message given}"
     echo "$stars"
     echo "$prefix ($(_ctx))"
     echo "$stars"
@@ -44,30 +48,41 @@ _fmt_ctx() {
 
 # Print a highly-visible message to stderr.  Usage: warn <msg>
 warn() {
-    _fmt_ctx "$WARNING_MSG_PREFIX ${1:-no warning message given}" > /dev/stderr
+    _fmt_ctx "$WARNING_MSG_PREFIX ${1:-no warning message given}" >> /dev/stderr
 }
 
 # Same as warn() but exit non-zero or with given exit code
 # usage: die <msg> [exit-code]
 die() {
-    _fmt_ctx "$ERROR_MSG_PREFIX ${1:-no error message given}" > /dev/stderr
+    _fmt_ctx "$ERROR_MSG_PREFIX ${1:-no error message given}" >> /dev/stderr
     local exit_code=${2:-1}
     ((exit_code==0)) || \
         exit $exit_code
 }
 
 dbg() {
-    if ((DEBUG)); then
-        local shortest_source_path=$(_rel_path "${BASH_SOURCE[1]}")
+    local shortest_source_path
+    if ((A_DEBUG)); then
+        shortest_source_path=$(_rel_path "${BASH_SOURCE[1]}")
         (
         echo
         echo "$DEBUG_MSG_PREFIX ${1:-No debugging message given} ($shortest_source_path:${BASH_LINENO[0]} in ${FUNCNAME[1]}())"
-        ) > /dev/stderr
+        ) >> /dev/stderr
     fi
 }
 
 msg() {
-    echo "${1:-No message specified}" &> /dev/stderr
+    echo "${1:-No message specified}" &>> /dev/stderr
 }
 
+# Mimic set +x for a single command, along with calling location and line.
+showrun() {
+    local -a context
+    # Tried using readarray, it broke tests for some reason, too lazy to investigate.
+    # shellcheck disable=SC2207
+    context=($(caller 0))
+    echo "+ $* # ${context[2]}:${context[0]} in ${context[1]}()" >> /dev/stderr
+    "$@"
+}
+
 # Expects stdin, indents every input line right by 4 spaces
@@ -101,7 +116,7 @@ show_env_vars() {
         warn "The \$SECRET_ENV_RE var. unset/empty: Not filtering sensitive names!"
     fi
 
-    for env_var_name in $(awk 'BEGIN{for(v in ENVIRON) print v}' | grep -Eiv "$filter_rx" | sort -u); do
+    for env_var_name in $(awk 'BEGIN{for(v in ENVIRON) print v}' | grep -Eiv "$filter_rx" | sort); do
 
         line="${env_var_name}=${!env_var_name}"
         msg "    $line"

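A minimal usage sketch for the output helpers above, assuming the library has
been installed and $AUTOMATION_LIB_PATH points at it (the invocations are
illustrative, not part of this change):

    source "$AUTOMATION_LIB_PATH/console_output.sh"

    msg "Starting work"            # plain message to stderr
    A_DEBUG=1 dbg "Extra detail"   # printed only when A_DEBUG is non-zero
    warn "Disk is nearly full"     # highly-visible warning with caller context
    showrun make all               # echoes '+ make all # <file>:<line> in <func>()' then runs it
    die "Cannot continue" 3        # warning-style message, then exit 3
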
@@ -7,16 +7,12 @@ CI="${CI:-false}"  # true: _unlikely_ human-presence at the controls.
 [[ $CI == "false" ]] || CI='true'  # Err on the side of automation
 
 # Default to NOT running in debug-mode unless set non-zero
-DEBUG=${DEBUG:-0}
-# Conditionals like ((DEBUG)) easier than checking "true"/"False"
-( test "$DEBUG" -eq 0 || test "$DEBUG" -ne 0 ) &>/dev/null || DEBUG=1  # assume true when non-integer
+A_DEBUG=${A_DEBUG:-0}
+# Conditionals like ((A_DEBUG)) easier than checking "true"/"False"
+( test "$A_DEBUG" -eq 0 || test "$A_DEBUG" -ne 0 ) &>/dev/null || \
+    A_DEBUG=1  # assume true when non-integer
 
 # String prefixes to use when printing messages to the console
 DEBUG_MSG_PREFIX="${DEBUG_MSG_PREFIX:-DEBUG:}"
 WARNING_MSG_PREFIX="${WARNING_MSG_PREFIX:-WARNING:}"
 ERROR_MSG_PREFIX="${ERROR_MSG_PREFIX:-ERROR:}"
-
-# When non-empty, should contain a regular expression that matches
-# any known or potential env. vars containing secrests or other
-# sensitive values.  For example `(.+PASSWORD.*)|(.+SECRET.*)|(.+TOKEN.*)`
-SECRET_ENV_RE=''

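Given the coercion above, any non-integer value enables debug-mode; for
example (illustrative invocations of a hypothetical consumer script):

    A_DEBUG=true ./some_script.sh   # "true" is non-integer, coerced to A_DEBUG=1
    A_DEBUG=0 ./some_script.sh      # dbg() output suppressed
    A_DEBUG=5 ./some_script.sh      # any non-zero integer also enables dbg()
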
@@ -0,0 +1,95 @@
+
+# Library of os/platform related definitions and functions
+# Not intended to be executed directly
+
+OS_RELEASE_VER="${OS_RELEASE_VER:-$(source /etc/os-release; echo $VERSION_ID | tr -d '.')}"
+OS_RELEASE_ID="${OS_RELEASE_ID:-$(source /etc/os-release; echo $ID)}"
+OS_REL_VER="${OS_REL_VER:-$OS_RELEASE_ID-$OS_RELEASE_VER}"
+
+# Ensure no user-input prompts in an automation context
+export DEBIAN_FRONTEND="${DEBIAN_FRONTEND:-noninteractive}"
+# _TEST_UID only needed for unit-testing
+# shellcheck disable=SC2154
+if ((UID)) || ((_TEST_UID)); then
+    SUDO="${SUDO:-sudo}"
+    if [[ "$OS_RELEASE_ID" =~ (ubuntu)|(debian) ]]; then
+        if [[ ! "$SUDO" =~ noninteractive ]]; then
+            SUDO="$SUDO env DEBIAN_FRONTEND=$DEBIAN_FRONTEND"
+        fi
+    fi
+fi
+
+# Regex defining all CI-related env. vars. necessary for all possible
+# testing operations on all platforms and versions.  This is necessary
+# to avoid needlessly passing through global/system values across
+# contexts, such as host->container or root->rootless user
+#
+# List of envariables which must be EXACT matches
+PASSTHROUGH_ENV_EXACT="${PASSTHROUGH_ENV_EXACT:-DEST_BRANCH|IMAGE_SUFFIX|DISTRO_NV|SCRIPT_BASE}"
+
+# List of envariable patterns which must match AT THE BEGINNING of the name.
+PASSTHROUGH_ENV_ATSTART="${PASSTHROUGH_ENV_ATSTART:-CI|TEST}"
+
+# List of envariable patterns which can match ANYWHERE in the name
+PASSTHROUGH_ENV_ANYWHERE="${PASSTHROUGH_ENV_ANYWHERE:-_NAME|_FQIN}"
+
+# List of expressions to exclude env. vars for security reasons
+SECRET_ENV_RE="${SECRET_ENV_RE:-(^PATH$)|(^BASH_FUNC)|(^_.*)|(.*PASSWORD.*)|(.*TOKEN.*)|(.*SECRET.*)}"
+
+# Return a list of environment variables that should be passed through
+# to lower levels (tests in containers, or via ssh to rootless).
+# We return the variable names only, not their values.  It is up to our
+# caller to reference values.
+passthrough_envars() {
+    local passthrough_env_re="(^($PASSTHROUGH_ENV_EXACT)\$)|(^($PASSTHROUGH_ENV_ATSTART))|($PASSTHROUGH_ENV_ANYWHERE)"
+    local envar
+
+    for envar in SECRET_ENV_RE PASSTHROUGH_ENV_EXACT PASSTHROUGH_ENV_ATSTART PASSTHROUGH_ENV_ANYWHERE passthrough_env_re; do
+        if [[ -z "${!envar}" ]]; then
+            echo "Error: Required env. var. \$$envar is unset or empty in call to passthrough_envars()" >> /dev/stderr
+            exit 1
+        fi
+    done
+
+    echo "Warning: Will pass env. vars. matching the following regex:
+    $passthrough_env_re" >> /dev/stderr
+
+    compgen -A variable | grep -Ev "$SECRET_ENV_RE" | grep -E "$passthrough_env_re"
+}
+
+# On more occasions than we'd like, it's necessary to put temporary
+# platform-specific workarounds in place.  To help ensure they'll
+# actually be temporary, it's useful to place a time limit on them.
+# This function accepts two arguments:
+# - A (required) future date of the form YYYYMMDD (UTC based).
+# - An (optional) message string to display upon expiry of the timebomb.
+timebomb() {
+    local expire="$1"
+
+    if ! expr "$expire" : '[0-9]\{8\}$' > /dev/null; then
+        echo "timebomb: '$expire' must be UTC-based and of the form YYYYMMDD"
+        exit 1
+    fi
+
+    if [[ $(date -u +%Y%m%d) -lt $(date -u -d "$expire" +%Y%m%d) ]]; then
+        return
+    fi
+
+    declare -a frame
+    read -a frame < <(caller)
+
+    cat << EOF >> /dev/stderr
+***********************************************************
+* TIME BOMB EXPIRED!
+*
+*   >> ${frame[1]}:${frame[0]}: ${2:-No reason given, tsk tsk}
+*
+* Temporary workaround expired on ${expire:0:4}-${expire:4:2}-${expire:6:2}.
+*
+* Please review the above source file and either remove the
+* workaround or, if absolutely necessary, extend it.
+*
+* Please also check for other timebombs while you're at it.
+***********************************************************
+EOF
+    exit 1
+}

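A short sketch of the two new entry-points above; the container image and
bug reference are illustrative assumptions, not part of this change:

    source "$AUTOMATION_LIB_PATH/platform.sh"

    # passthrough_envars prints names only; the caller dereferences values
    envargs=$(for v in $(passthrough_envars); do echo "-e $v=${!v}"; done)
    $SUDO podman run --rm $envargs quay.io/example/image:latest

    # Exit loudly if this workaround survives past 2030-01-01 (UTC)
    timebomb 20300101 "Remove once example-bug #1234 is fixed upstream"
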
@@ -1,5 +1,5 @@
 
-# Library of utility functions for manipulating/controling bash-internals
+# Library of utility functions for manipulating/controlling bash-internals
 # Not intended to be executed directly
 
 source $(dirname $(realpath "${BASH_SOURCE[0]}"))/console_output.sh
@@ -56,7 +56,7 @@ not_contains(){
 # Usage: err_retry <attempts> <sleep ms> <exit_code> <command> <args>
 # Where:
 #   attempts: The number of attempts to make.
-#   sleep ms: Number of miliseconds to sleep (doubles every attempt)
+#   sleep ms: Number of milliseconds to sleep (doubles every attempt)
 #   exit_code: Space separated list of exit codes to retry.  If empty
 #              then any non-zero code will be considered for retry.
 #

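For example, given the documented signature, a flaky download could be
wrapped like this (URL and exit codes are placeholders):

    # Up to 3 attempts, starting with a 1000ms sleep (doubled each attempt),
    # retrying only when curl exits 6 or 7 (resolve/connect failures).
    err_retry 3 1000 "6 7" curl --fail --location -O https://example.com/artifact.tar.gz
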
@@ -12,13 +12,13 @@ source "$AUTOMATION_LIB_PATH/common_lib.sh"
 set +e
 
 test_function() {
-    DEBUG=1 dbg "Test dbg message"
+    A_DEBUG=1 dbg "Test dbg message"
     warn "Test warning message"
     msg "Test msg message"
    die "Test die message" 0
 }
 
-DEBUG=1 dbg "Test dbg message"
+A_DEBUG=1 dbg "Test dbg message"
 warn "Test warning message"
 msg "Test msg message"
 die "Test die message" 0

@@ -6,6 +6,6 @@ set -e
 
 cd $(dirname $0)
 for testscript in test???-*.sh; do
-    echo -e "\nExecuting $testscript..." > /dev/stderr
+    echo -e "\nExecuting $testscript..." >> /dev/stderr
     ./$testscript
 done

@@ -23,10 +23,20 @@ test_cmd \
     $INSTALLER_FILEPATH "not a version number"
 
 test_cmd \
-    "The installer exits non-zero with a helpful message about an non-existant version" \
+    "The installer exits non-zero with a helpful message about an non-existent version" \
     128 "fatal.+v99.99.99.*not found" \
     $INSTALLER_FILEPATH 99.99.99
 
+test_cmd \
+    "The installer successfully installs the oldest tag" \
+    0 "installer version 'v1.0.0'.+exec.+AUTOMATION_REPO_BRANCH=main.+Installation complete" \
+    $INSTALLER_FILEPATH 1.0.0
+
+test_cmd \
+    "The oldest installed installer's default branch was modified" \
+    0 "" \
+    grep -Eqm1 '^AUTOMATION_REPO_BRANCH=.+main' "$INSTALL_PREFIX/automation/bin/$SUBJ_FILENAME"
+
 test_cmd \
     "The installer detects incompatible future installer source version by an internal mechanism" \
     10 "Error.+incompatible.+99.99.99" \
@@ -54,6 +64,11 @@ test_cmd \
     0 "$(git describe HEAD)" \
     cat "$INSTALL_PREFIX/automation/AUTOMATION_VERSION"
 
+test_cmd \
+    "The installer script doesn't redirect to 'stderr' anywhere." \
+    1 "" \
+    grep -q '> /dev/stderr' $INSTALLER_FILEPATH
+
 load_example_environment() {
     local _args="$@"
     # Don't disturb testing
@@ -61,7 +76,7 @@ load_example_environment() {
         source "$INSTALL_PREFIX/automation/environment" || return 99
         echo "AUTOMATION_LIB_PATH ==> ${AUTOMATION_LIB_PATH:-UNDEFINED}"
         echo "PATH ==> ${PATH:-EMPTY}"
-        [[ -z "$_args" ]] || DEBUG=1 $_args
+        [[ -z "$_args" ]] || A_DEBUG=1 $_args
     )
 }

@@ -26,7 +26,7 @@ for path_var in AUTOMATION_LIB_PATH AUTOMATION_ROOT SCRIPT_PATH; do
     test_cmd "\$$path_var is defined and non-empty: ${!path_var}" \
         0 "" \
         test -n "${!path_var}"
-    test_cmd "\$$path_var referrs to existing directory" \
+    test_cmd "\$$path_var refers to existing directory" \
         0 "" \
         test -d "${!path_var}"
 done

@@ -58,20 +58,20 @@ test_cmd "The indent function indents it's own output" \
     0 "$EXPECTED_SUM" \
     bash -c "echo '$TEST_STRING' | indent | indent | sha256sum"
 
-DEBUG=0
+A_DEBUG=0
 test_cmd \
-    "The dbg function has no output when \$DEBUG is zero and no message is given" \
+    "The dbg function has no output when \$A_DEBUG is zero and no message is given" \
     0 "" \
     dbg
 
 test_cmd \
-    "The dbg function has no output when \$DEBUG is zero and a test message is given" \
+    "The dbg function has no output when \$A_DEBUG is zero and a test message is given" \
     0 "" \
     dbg "$test_message_text"
 
-DEBUG=1
+A_DEBUG=1
 basic_tests dbg 0 DEBUG
-DEBUG=0
+A_DEBUG=0
 
 test_cmd \
     "All primary output functions include the expected context information" \
@@ -148,5 +148,20 @@ test_cmd \
 
 unset env_vars SECRET_ENV_RE UPPERCASE super_secret nOrMaL_vAr
 
+test_cmd \
+    "The showrun function executes /bin/true as expected" \
+    0 "\+ /bin/true # \./testlib.sh:97 in test_cmd"\
+    showrun /bin/true
+
+test_cmd \
+    "The showrun function executes /bin/false as expected" \
+    1 "\+ /bin/false # \./testlib.sh:97 in test_cmd"\
+    showrun /bin/false
+
+test_cmd \
+    "The showrun function can call itself" \
+    0 "\+ /bin/true # .*console_output.sh:[0-9]+ in showrun" \
+    showrun showrun /bin/true
+
 # script is set +e
 exit_with_status

@@ -20,24 +20,24 @@ test_ci() {
     CI="$prev_CI"
 }
 
-# DEBUG must default to 0 or non-zero
+# A_DEBUG must default to 0 or non-zero
 # usage: <expected non-zero> [initial_value]
 test_debug() {
     local exp_non_zero=$1
     local init_value="$2"
     [[ -z "$init_value" ]] || \
-        DEBUG=$init_value
-    local desc_pfx="The \$DEBUG env. var initialized '$init_value', after loading library is"
+        A_DEBUG=$init_value
+    local desc_pfx="The \$A_DEBUG env. var initialized '$init_value', after loading library is"
 
     source "$TEST_DIR"/"$SUBJ_FILENAME"
     if ((exp_non_zero)); then
         test_cmd "$desc_pfx non-zero" \
             0 "" \
-            test "$DEBUG" -ne 0
+            test "$A_DEBUG" -ne 0
     else
         test_cmd "$desc_pfx zero" \
             0 "" \
-            test "$DEBUG" -eq 0
+            test "$A_DEBUG" -eq 0
     fi
 }

@@ -0,0 +1,100 @@
+#!/bin/bash
+
+# Unit-tests for library script in the current directory
+# Also verifies test script is derived from library filename
+
+# shellcheck source-path=./
+source $(dirname ${BASH_SOURCE[0]})/testlib.sh || exit 1
+# Must be statically defined, 'source-path' directive can't work here.
+# shellcheck source=../lib/platform.sh disable=SC2154
+source "$TEST_DIR/$SUBJ_FILENAME" || exit 2
+
+# For whatever reason, SCRIPT_PATH cannot be resolved.
+# shellcheck disable=SC2154
+test_cmd "Library $SUBJ_FILENAME is not executable" \
+    0 "" \
+    test ! -x "$SCRIPT_PATH/$SUBJ_FILENAME"
+
+for var in OS_RELEASE_VER OS_RELEASE_ID OS_REL_VER; do
+    test_cmd "The variable \$$var is defined and non-empty" \
+        0 "" \
+        test -n "${!var}"
+done
+
+for var in OS_RELEASE_VER OS_REL_VER; do
+    NODOT=$(tr -d '.' <<<"${!var}")
+    test_cmd "The '.' character does not appear in \$$var" \
+        0 "" \
+        test "$NODOT" == "${!var}"
+done
+
+for OS_RELEASE_ID in 'debian' 'ubuntu'; do
+    (
+        export _TEST_UID=$RANDOM  # Normally $UID is read-only
+        # Must be statically defined, 'source-path' directive can't work here.
+        # shellcheck source=../lib/platform.sh disable=SC2154
+        source "$TEST_DIR/$SUBJ_FILENAME" || exit 2
+
+        # The point of this test is to confirm it's defined
+        # shellcheck disable=SC2154
+        test_cmd "The '\$SUDO' env. var. is non-empty when \$_TEST_UID is non-zero" \
+            0 "" \
+            test -n "$SUDO"
+
+        test_cmd "The '\$SUDO' env. var. contains 'noninteractive' when '\$_TEST_UID' is non-zero" \
+            0 "noninteractive" \
+            echo "$SUDO"
+    )
+done
+
+test_cmd "The passthrough_envars() func. has output by default." \
+    0 ".+" \
+    passthrough_envars
+
+(
+    # Confirm defaults may be overriden
+    PASSTHROUGH_ENV_EXACT="FOOBARBAZ"
+    PASSTHROUGH_ENV_ATSTART="FOO"
+    PASSTHROUGH_ENV_ANYWHERE="BAR"
+    export FOOBARBAZ="testing"
+
+    test_cmd "The passthrough_envars() func. w/ overriden expr. only prints name of test variable." \
+        0 "FOOBARBAZ" \
+        passthrough_envars
+)
+
+# Test from a mostly empty environment to limit possibility of expr mismatch flakes
+declare -a printed_envs
+readarray -t printed_envs <<<$(env --ignore-environment PATH="$PATH" FOOBARBAZ="testing" \
+    SECRET_ENV_RE="(^PATH$)|(^BASH_FUNC)|(^_.*)|(FOOBARBAZ)|(SECRET_ENV_RE)" \
+    CI="true" AUTOMATION_LIB_PATH="/path/to/some/place" \
+    bash -c "source $TEST_DIR/$SUBJ_FILENAME && passthrough_envars")
+
+test_cmd "The passthrough_envars() func. w/ overriden \$SECRET_ENV_RE hides test variable." \
+    1 "0" \
+    expr match "${printed_envs[*]}" '.*FOOBARBAZ.*'
+
+test_cmd "The passthrough_envars() func. w/ overriden \$SECRET_ENV_RE returns CI variable." \
+    0 "[1-9]+[0-9]*" \
+    expr match "${printed_envs[*]}" '.*CI.*'
+
+test_cmd "timebomb() function requires at least one argument" \
+    1 "must be UTC-based and of the form YYYYMMDD" \
+    timebomb
+
+TZ=UTC12 \
+    test_cmd "timebomb() function ignores TZ and compares < UTC-forced current date" \
+    1 "TIME BOMB EXPIRED" \
+    timebomb $(TZ=UTC date +%Y%m%d)
+
+test_cmd "timebomb() alerts user when no description given" \
+    1 "No reason given" \
+    timebomb 00010101
+
+EXPECTED_REASON="test${RANDOM}test"
+test_cmd "timebomb() gives reason when one was provided" \
+    1 "$EXPECTED_REASON" \
+    timebomb 00010101 "$EXPECTED_REASON"
+
+# Must be last call
+exit_with_status

@@ -6,7 +6,7 @@
 # Set non-zero to enable
 TEST_DEBUG=${TEST_DEBUG:-0}
 
-# Test subject filename and directory name are derrived from test-script filename
+# Test subject filename and directory name are derived from test-script filename
 SUBJ_FILENAME=$(basename $0)
 if [[ "$SUBJ_FILENAME" =~ "testlib-" ]]; then
     SUBJ_FILENAME="${SUBJ_FILENAME#testlib-}"

@@ -88,7 +88,7 @@ test_cmd() {
         echo "# $@" > /dev/stderr
     fi
 
-    # Using egrep vs file safer than shell builtin test
+    # Using grep vs file safer than shell builtin test
     local a_out_f=$(mktemp -p '' "tmp_${FUNCNAME[0]}_XXXXXXXX")
     local a_exit=0

@@ -108,7 +108,7 @@ test_cmd() {
     if ((TEST_DEBUG)); then
         echo "Received $(wc -l $a_out_f | awk '{print $1}') output lines of $(wc -c $a_out_f | awk '{print $1}') bytes total"
     fi
-    if egrep -q "$e_out_re" "${a_out_f}.oneline"; then
+    if grep -Eq "$e_out_re" "${a_out_f}.oneline"; then
         _test_report "Command $1 exited as expected with expected output" "0" "$a_out_f"
     else
         _test_report "Expecting regex '$e_out_re' match to (whitespace-squashed) output" "1" "$a_out_f"

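For reference, every harness call in these tests follows the same pattern; a
minimal hypothetical example:

    # test_cmd <description> <expected exit code> <expected output regex> <command> [args...]
    test_cmd "date prints a four-digit year" \
        0 "[0-9]{4}" \
        date +%Y
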
@@ -0,0 +1,6 @@
+{
+  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+  "extends": [
+    "github>containers/automation//renovate/defaults.json5"
+  ]
+}

@@ -1 +0,0 @@
-Dockerfile

@@ -1,26 +0,0 @@
-FROM registry.fedoraproject.org/fedora-minimal:latest
-RUN microdnf update -y && \
-    microdnf install -y coreutils curl gnupg2 git jq && \
-    microdnf clean all && \
-    rm -rf /var/cache/dnf
-# Assume build is for development/manual testing purposes by default (automation should override with fixed version)
-ARG INSTALL_AUTOMATION_VERSION=latest
-ARG INSTALL_AUTOMATION_URI=https://github.com/containers/automation/releases/latest/download/install_automation.sh
-ADD / /usr/src/automation
-RUN if [[ "$INSTALL_AUTOMATION_VERSION" == "0.0.0" ]]; then \
-        env INSTALL_PREFIX=/usr/share \
-            /usr/src/automation/bin/install_automation.sh 0.0.0 ephemeral_gpg; \
-    else \
-        curl --silent --show-error --location \
-             --url "$INSTALL_AUTOMATION_URI" | env INSTALL_PREFIX=/usr/share \
-             /bin/bash -s - "$INSTALL_AUTOMATION_VERSION" ephemeral_gpg; \
-    fi
-# Required environment variables
-# TODO: use EPHEMERAL_CONTAINER value for something useful?
-ENV GITHUB_ACTIONS="false" \
-    ACTIONS_STEP_DEBUG="false" \
-    PRIVATE_KEY_FILEPATH="" \
-    PRIVATE_PASSPHRASE_FILEPATH="" \
-    EPHEMERAL_CONTAINER=="1"
-WORKDIR /root
-ENTRYPOINT ["/bin/bash", "-c", "source /etc/profile && exec /usr/share/automation/bin/ephemeral_gpg.sh"]

@@ -1,9 +0,0 @@
-# Overview
-
-This directory contains the necessary pieces to produce a container image
-for executing gpg with an ephemeral home-directory and externally supplied
-keyfiles.  This is intended to protect the keyfiles and avoid persisting any
-runtime daemons/background processes or their temporary files.
-
-It is assumed the reader is familiar with gpg [and it's unattended
-usage.](https://www.gnupg.org/documentation//manuals/gnupg/Unattended-Usage-of-GPG.html#Unattended-Usage-of-GPG)

@@ -1,73 +0,0 @@
-#!/bin/bash
-
-set -eo pipefail
-
-# Intended to be used by humans for debugging purposes.  Drops the caller
-# into a bash shell within a pre-configured ephemeral environment.
-
-EPHEMERAL_GPG_LIB=$(dirname $(realpath "$0"))/../lib/ephemeral_gpg.sh
-set -a
-# Will be spawning interactive shell near the end, make sure it can access these functions
-source "$EPHEMERAL_GPG_LIB"
-set +a
-
-##### MAIN() #####
-
-msg "Setting up mock ephemeral directory, \$PRIVATE_KEY_FILEPATH and \$PRIVATE_PASSPHRASE_FILEPATH"
-
-# These are required to pass verify_env_vars
-export PRIVATE_KEY_FILEPATH=$(mktemp -p '' $(basename $(realpath "$SCRIPT_PATH/../"))_XXXX)
-export PRIVATE_PASSPHRASE_FILEPATH=$(mktemp -p '' $(basename $(realpath "$SCRIPT_PATH/../"))_XXXX)
-trap "rm -vf $PRIVATE_KEY_FILEPATH $PRIVATE_PASSPHRASE_FILEPATH" EXIT
-# Nothing special here, mearly material for a passphrase
-echo "$(basename $PRIVATE_KEY_FILEPATH)$RANDOM$(basename $PRIVATE_PASSPHRASE_FILEPATH)$RANDOM" | \
-    base64 -w0 | tr -d -c '[:alnum:]' > $PRIVATE_PASSPHRASE_FILEPATH
-cp "$PRIVATE_PASSPHRASE_FILEPATH" "$PRIVATE_KEY_FILEPATH"
-
-msg "Running verification checks"
-verify_env_vars
-
-go_ephemeral
-
-msg "Generating quick-key (low-security) for experimental use."
-# Adds an encr and signing subkeys by default
-gpg_cmd --quick-generate-key 'Funky Tea Oolong <foo@bar.baz>' default default never
-gpg_status_error_die
-GPG_KEY_ID=$(print_cached_key)
-set_default_keyid "$GPG_KEY_ID"
-
-# These are not added by default
-for usage in sign auth; do
-    msg "Generating '$usage' subkey"
-    gpg_cmd --quick-add-key "$GPG_KEY_ID" default $usage
-    gpg_status_error_die
-done
-
-msg "Enabling GPG signatures in git (Config file is $GNUPGHOME/gitconfig)"
-configure_git_gpg
-
-msg "Importing github public key and adding to keyring."
-trust_github
-
-msg "Entering shell within ephemeral environment, all library variables/functions are available for use."
-msg "Notes:
-    * Dummy public and private keys have been generated with the ID
-      '$GPG_KEY_ID'.
-    * Git has been pre-configured to use the dummy key without entering any passwords.
-    * Reference the private-key passphrase as either \$_KEY_PASSPHRASE'
-      or '$_KEY_PASSPHRASE'.
-    * All normal shell commands can be used, in addition to all functions from
-      '$EPHEMERAL_GPG_LIB'.
-    * Enable additional debugging output at any time with 'export DEBUG=1'.
-    * Both \$HOME and \$PWD are now an ephemeral/temporary directory which will be removed upon leaving the shell.
-"
-
-# Setup to run inside a debugging "shell", where it's environment mimics the ephemeral environment
-cd $GNUPGHOME
-cp -a /etc/skel/.??* $GNUPGHOME/  # $HOME will be set here, make sure we overwrite any git/gpg settings
-rm -f $GNUPGHOME/.bash_logout  # don't clear screen on exit
-
-# In a debugging use-case only, un-unset $_KEY_PASSPHRASE inside ephemeral_env (we're using a dummy key anyway)
-ephemeral_env env _KEY_PASSPHRASE="$_KEY_PASSPHRASE" /bin/bash --login --norc -i
-cd - &> /dev/null
-dbg "Removing ephemeral environment"

@@ -1,101 +0,0 @@
-#!/bin/bash
-
-set -eo pipefail
-
-# Execute gpg with an ephemeral home-directory and externally supplied
-# key details.  This is intended to protect sensitive bits by avoiding
-# persisting any runtime daemons/background processes or temporary files.
-# Allowing gpg and/or git commands to be executed inside a volume-mounted
-# workdir using a consistent and repeatable environment.
-#
-# Ref: https://www.gnupg.org/documentation//manuals/gnupg/Unattended-Usage-of-GPG.html#Unattended-Usage-of-GPG
-
-source $(dirname $(realpath "${BASH_SOURCE[0]}"))/../lib/$(basename "${BASH_SOURCE[0]}")
-
-# Documented/intended normal behavior to protect keys at rest
-safe_keyfile() {
-    # Validated by verify_env_vars()
-    if ((TRUNCATE_KEY_ON_READ)); then
-        dbg "Truncating \$PRIVATE_KEY_FILEPATH useless after import."
-        truncate --size=0 "$PRIVATE_KEY_FILEPATH"
-    fi
-}
-
-# Scan file, extract FIRST ascii-armor encoded private-key ONLY
-first_private_key() {
-    file="$1"
-    [[ -n "$file" ]] || \
-        die "Expecting path to file as first argument"
-    dbg "Importing the first private-key encountered in '$file'"
-    awk -r -e '
-        BEGIN {
-            got_start=0;
-            got_end=0;
-        }
-        /-----BEGIN.+PRIVATE/ {
-            if (got_end == 1) exit 1;
-            got_start=1;
-        }
-        /-----END.+PRIVATE/ {
-            if (got_start == 0) exit 2;
-            got_end=1;
-            print $0;
-        }
-        {
-            if (got_start == 1 && got_end == 0) print $0; else next;
-        }
-    ' "$file"
-}
-
-##### MAIN() #####
-
-dbg "Validating required environment variables and values"
-verify_env_vars
-dbg "Entering ephemeral environment"
-# Create a $GNUPGHOME and arrange for it's destruction upon exit
-go_ephemeral
-
-# The incoming key file may have an arbitrary number of public
-# and private keys, in an arbitrary order.  For configuration
-# and trust purposes, we must obtain exactly one primary secret
-# key's ID.  While we're at it, import and clean/fix the key
-# into a new keyring.
-first_private_key "$PRIVATE_KEY_FILEPATH" | \
-    gpg_cmd --import --import-options import-local-sigs,no-import-clean,import-restore
-gpg_status_error_die
-
-# Grab reference to the ID of the primary secret key imported above
-GPG_KEY_ID=$(print_cached_key)
-
-# For all future gpg commands, reference this key as the default
-set_default_keyid $GPG_KEY_ID
-
-# Imported keys have an 'untrusted' attribute assigned by default
-dbg "Marking imported private-key as ultimately trusted and valid"
-# Under non-debugging situations ignore all the output
-dbg $(gpg_cmd --command-fd 0 --edit-key "$GPG_KEY_ID" <<<"
-trust
-5
-y
-enable
-save
-")
-# Exit if there was any error
-gpg_status_error_die
-
-dbg "Importing remaining keys in \$PRIVATE_KEY_FILEPATH '$PRIVATE_KEY_FILEPATH'"
-# Don't clobber the alrady imported and trusted primary key "$GPG_KEY_ID
-gpg_cmd --import --import-options keep-ownertrust,import-clean <"$PRIVATE_KEY_FILEPATH"
-gpg_status_error_die
-# Assume it is desireable to protect data-at-rest as much as possible
-safe_keyfile
-
-# This allows validating any appearance of this key in the commit log
-dbg "Importing and trusting Github's merge-commit signing key"
-trust_github
-dbg "Configuring unattended gpg use by git"
-configure_git_gpg
-
-# Execute the desired command/arguments from the command-line, inside prepared environment
-ephemeral_env "$@"
-exit $?

@@ -1,8 +0,0 @@
-
-# This library simply sources the necessary common libraries.
-# Not intended for direct execution
-AUTOMATION_LIB_PATH="${AUTOMATION_LIB_PATH:-$(dirname $(realpath ${BASH_SOURCE[0]}))/../../common/lib}"
-source "$AUTOMATION_LIB_PATH/defaults.sh"
-source "$AUTOMATION_LIB_PATH/anchors.sh"
-source "$AUTOMATION_LIB_PATH/console_output.sh"
-source "$AUTOMATION_LIB_PATH/utils.sh"

@@ -1,398 +0,0 @@
-
-# Library of constants and functions for the ephemeral_gpg scripts and tests
-# Not intended to be executed directly.
-
-LIBRARY_DIRPATH=$(dirname $(realpath "${BASH_SOURCE[0]}"))
-source "$LIBRARY_DIRPATH/common.sh"
-
-# Executing inside a container (TODO: not used)
-EPHEMERAL_CONTAINER="${EPHEMERAL_CONTAINER:-0}"
-
-# Path to script template rendered by configure_git_gpg()
-GIT_UNATTENDED_GPG_TEMPLATE="$LIBRARY_DIRPATH/git_unattended_gpg.sh.in"
-
-# In case a bash prompt is presented, identify the environment
-EPHEMERAL_ENV_PROMPT_DIRTRIM=2
-EPHEMERAL_ENV_PS1='\e[0m[\e[0;37;41mEPHEMERAL\e[0m \e[1;34m\w\e[0m]\e[1;36m\$\e[0m '
-
-# If for some future/unknown reason, input keys and passphrase files
-# should NOT be truncated after read, set these to 0.
-TRUNCATE_KEY_ON_READ=1
-TRUNCATE_PASSPHRASE_ON_READ=1
-
-# Machine parse-able status will be written here
-# Empty-files have special-meanings to gpg, detect them to help debugging
-MIN_INPUT_FILE_SIZE=8  # bytes
-
-# Ref: https://help.github.com/en/github/authenticating-to-github/about-commit-signature-verification
-GH_PUB_KEY_ID="4AEE18F83AFDEB23"
-# Don't rely on internet access to download the key
-GH_PUB_KEY="-----BEGIN PGP PUBLIC KEY BLOCK-----
-
-mQENBFmUaEEBCACzXTDt6ZnyaVtueZASBzgnAmK13q9Urgch+sKYeIhdymjuMQta
-x15OklctmrZtqre5kwPUosG3/B2/ikuPYElcHgGPL4uL5Em6S5C/oozfkYzhwRrT
-SQzvYjsE4I34To4UdE9KA97wrQjGoz2Bx72WDLyWwctD3DKQtYeHXswXXtXwKfjQ
-7Fy4+Bf5IPh76dA8NJ6UtjjLIDlKqdxLW4atHe6xWFaJ+XdLUtsAroZcXBeWDCPa
-buXCDscJcLJRKZVc62gOZXXtPfoHqvUPp3nuLA4YjH9bphbrMWMf810Wxz9JTd3v
-yWgGqNY0zbBqeZoGv+TuExlRHT8ASGFS9SVDABEBAAG0NUdpdEh1YiAod2ViLWZs
-b3cgY29tbWl0IHNpZ25pbmcpIDxub3JlcGx5QGdpdGh1Yi5jb20+iQEiBBMBCAAW
-BQJZlGhBCRBK7hj4Ov3rIwIbAwIZAQAAmQEH/iATWFmi2oxlBh3wAsySNCNV4IPf
-DDMeh6j80WT7cgoX7V7xqJOxrfrqPEthQ3hgHIm7b5MPQlUr2q+UPL22t/I+ESF6
-9b0QWLFSMJbMSk+BXkvSjH9q8jAO0986/pShPV5DU2sMxnx4LfLfHNhTzjXKokws
-+8ptJ8uhMNIDXfXuzkZHIxoXk3rNcjDN5c5X+sK8UBRH092BIJWCOfaQt7v7wig5
-4Ra28pM9GbHKXVNxmdLpCFyzvyMuCmINYYADsC848QQFFwnd4EQnupo6QvhEVx1O
-j7wDwvuH5dCrLuLwtwXaQh0onG4583p0LGms2Mf5F+Ick6o/4peOlBoZz48=
-=Bvzs
------END PGP PUBLIC KEY BLOCK-----
-"
-
-# E-mail addresses are complex to match perfectly, assume this is good enough
-FULL_NAME_RX='^([[:graph:] ]+)+'  # e.x. First Middle-Initial. Last (Comment) <user@example.com>
-EMAIL_RX='[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]+'
-FULL_NAME_EMAIL_RX="${FULL_NAME_RX}\B<${EMAIL_RX}>"
-
-# Intentionally blank, this is set by calling set_default_keyid()
-_KEY_CACHE_FN=""
-_DEF_KEY_ID=""
-_DEF_KEY_ARG=""
-_KEY_PASSPHRASE=""
-_EPHEMERAL_ENV_EXIT=0
-
-# Used by get_???_key_id functions
-_KEY_COMMON_RX='u:[[:digit:]]+:[[:digit:]]+:[[:alnum:]]+:[[:digit:]]+:+u?:+'
-
-# These variables either absolutely must not, or simply should not
-# pass through to commands executed beneith the ephemeral environment
-_UNSET_VARS=( \
-    EMAIL_RX
-    EPHEMERAL_ENV_PROMPT_DIRTRIM
-    FULL_NAME_EMAIL_RX
-    FULL_NAME_RX
-    GH_PUB_KEY
-    GH_PUB_KEY_ID
-    MKTEMP_FORMAT
-    PRIVATE_KEY_FILEPATH
-    PRIVATE_PASSPHRASE_FILEPATH
-    TRUNCATE_KEY_ON_READ
-    TRUNCATE_PASSPHRASE_ON_READ
-    _BOOKENDS_ESCAPED_SED_EXP
-    _BOOKENDS_SED_EXP
-    _DEF_KEY_ID
-    _EPHEMERAL_ENV_EXIT
-    _KEY_COMMON_RX
-    _KEY_PASSPHRASE
-    _UNSET_VARS
-)
-
-verify_env_vars() {
-    local env_var_name
-    for kind in KEY PASSPHRASE; do
-        case "$kind" in
-            KEY)
-                env_var_name=PRIVATE_KEY_FILEPATH
-                trunc_var_name=TRUNCATE_KEY_ON_READ
-                ;;
-            PASSPHRASE)
-                env_var_name=PRIVATE_PASSPHRASE_FILEPATH
-                trunc_var_name=TRUNCATE_PASSPHRASE_ON_READ
-                ;;
-            *)
-                die "Unsupported/Unknown \$kind '$kind'."
-        esac
-
-        dbg "Checking \$${env_var_name} '${!env_var_name}':"
-        dbg $(ls -la "${!env_var_name}" || true)
-
-        [[ -n "${!env_var_name}" ]] || \
-            die "Expecting \$$env_var_name to not be empty/blank" 2
-
-        [[ -f "${!env_var_name}" ]] || \
-            die "Expecting readable \$$env_var_name file, got '${!env_var_name}'" 2
-
-        # The '-w' test always passes for root, must look at actual permissions
-        dbg "Found \$$trunc_var_name '${!trunc_var_name}'"
-        if [[ ${!trunc_var_name} -ne 0 ]] && stat --format=%A "${!env_var_name}" | egrep -qv '^-rw'; then
-            die "The file referenced in \$$env_var_name must be writable if \$$trunc_var_name is '${!trunc_var_name}'"
-        else
-            dbg "The file "${!env_var_name}" is writeable)"
-        fi
-
-        if (($(stat "--format=%s" "${!env_var_name}")<$MIN_INPUT_FILE_SIZE)); then
-            die "The file '${!env_var_name}' must be larger than $MIN_INPUT_FILE_SIZE bytes."
-        fi
-
-        dbg "\$${env_var_name} appears fine for use."
-    done
-}
-
-# Setup environment required for non-interactive secure use of gpg_cmd()
-go_ephemeral() {
-    # Security-note: This is not perfectly safe, and it can't be in any practical way
-    # with a shell-script.  It simply ensures the key is only exposed in memory of the
-    # this shell process and not stored on disk in an otherwise known/discoverable location.
-    _KEY_PASSPHRASE="$(<$PRIVATE_PASSPHRASE_FILEPATH)"
-    if ((TRUNCATE_PASSPHRASE_ON_READ)); then
-        truncate --size=0 "$PRIVATE_PASSPHRASE_FILEPATH"
-    fi
-
-    export GNUPGHOME=$(mktemp -p '' -d $MKTEMP_FORMAT)
-    chmod 0700 "$GNUPGHOME"
-    dbg "Created '$GNUPGHOME' as \$GNUPGHOME, will be removed upon exit."
-    trap "rm -rf $GNUPGHOME" EXIT
-    dbg "Using \$GNUPGHOME $GNUPGHOME"
-
-    # Needed for error-checking and KEY ID caching
-    export GPG_STATUS_FILEPATH=$GNUPGHOME/gpg.status
-    # Must use a file not a variable for this, unit-tests execute in a subshell and a var would not persist.
-    _KEY_CACHE_FN=$GNUPGHOME/.keycache
-    touch "$_KEY_CACHE_FN"
-    touch "$GPG_STATUS_FILEPATH"
-
-    # Don't allow any default pass-through env. vars to leak from outside environment
-    local default_env_vars=$(gpg-connect-agent --quiet 'getinfo std_env_names' /bye | \
-                             tr -d '\000' | awk --sandbox '$1=="D" {print $2}' | \
-                             egrep -iv 'term')
-    dbg "Force-clearing "$default_env_vars
-    unset $default_env_vars
-
-    # gpg_cmd() checks for this to indicate function was called at least once
-    touch "$GNUPGHOME/.ephemeral"
-}
-
-# Execute arguments in a sanitized environment
-ephemeral_env() {
-    local args="$@"
-    # quoted @ is special-case substitution
-    dbg "Entering ephemeral environment for command execution: '$args'"
-    local gpg_key_uid="$(get_key_uid $_DEF_KEY_ID)"
-    local unsets=$(for us in "${_UNSET_VARS[@]}"; do echo "--unset=$us"; done)
-    cd $GNUPGHOME
-    env --default-signal \
-        ${unsets[@]} \
-        DEBUG="$DEBUG" \
-        TEST_DEBUG="$TEST_DEBUG" \
-        PROMPT_DIRTRIM="$EPHEMERAL_ENV_PROMPT_DIRTRIM" \
-        GNUPGHOME="$GNUPGHOME" \
-        HOME="$GNUPGHOME" \
-        GPG_KEY_ID="$_DEF_KEY_ID" \
-        GPG_KEY_UID="$gpg_key_uid" \
-        GPG_TTY="$(tty)" \
-        HISTFILE="$HISTFILE" \
-        HOME="$GNUPGHOME" \
-        PS1="$EPHEMERAL_ENV_PS1" \
-        "$@" || _EPHEMERAL_ENV_EXIT=$?
-    cd - &> /dev/null
-    dbg "Leaving ephemeral environment after command exit '$_EPHEMERAL_ENV_EXIT'"
-    return $_EPHEMERAL_ENV_EXIT
-}
-
-# Snag key IDs and hashes from common operations, assuming reverse order relevancy
-# N/B: NO error checking or validation is performed
-cache_status_key() {
-    [[ -r "$_KEY_CACHE_FN" ]] || \
-        die "Expecting prior call to go_ephemeral() function"
-    local awk_script='
-        / ERROR /{exit}
-        / KEY_CREATED /{print $4; exit}
-        / KEY_CONSIDERED /{print $3; exit}
-        / EXPORTED /{print $3; exit}
-        / IMPORT_OK /{print $4; exit}
-    '
-    local cache="$(tac $GPG_STATUS_FILEPATH | awk -e "$awk_script")"
-    if [[ -n "$cache" ]]; then
-        dbg "Caching '$cache' in '$_KEY_CACHE_FN'"
-        echo -n "$cache" > "$_KEY_CACHE_FN"
-    else
-        dbg "Clearing cache in '$_KEY_CACHE_FN'"
-        truncate --size 0 "$_KEY_CACHE_FN"
-    fi
-}
-
-print_cached_key() {
-    [[ -r "$_KEY_CACHE_FN" ]] || \
-        die "Expecting prior call to go_ephemeral() function"
-    local cache=$(<"$_KEY_CACHE_FN")
-    if [[ -n "$cache" ]]; then
-        dbg "Found cached key '$cache'"
-        echo "$cache" > /dev/stdout
-    else
-        # Be helpful to callers with a warning, assume they were not expecting the cache to be empty/cleared.
-        warn "Empty key cache '$_KEY_CACHE_FN' encountered in call from ${BASH_SOURCE[2]}:${BASH_LINENO[1]}"
-    fi
-}
-
-# Execute gpg batch command with secure passphrase
-# N/B: DOES NOT die() ON ERROR, CALLER MUST CHECK RETURN STATUS FILE
-gpg_cmd() {
-    args="$@"
-    [[ -n "$args" ]] || \
-        die "Expecting one or more gpg arguments as function parameters"
-    [[ -r "$GNUPGHOME/.ephemeral" ]] || \
-        die "The go_ephemeral() function must be used before calling ${FUNCNAME[0]}()"
-    [[ ${#_KEY_PASSPHRASE} -gt $MIN_INPUT_FILE_SIZE ]] || \
-        die "Bug: Passphrase not found in \$_KEY_PASSPHRASE"
-    local harmless_warning_rx='^gpg: WARNING: standard input reopened.*'
-    local future_algo="ed25519/cert,sign+cv25519/encr"
-    local cmd="gpg --quiet --batch --with-colons \
-               --status-file $GPG_STATUS_FILEPATH \
-               --pinentry-mode loopback --passphrase-fd 42 \
-               --trust-model tofu+pgp --tofu-default-policy good \
-               --default-new-key-algo $future_algo \
-               $_DEF_KEY_ARG --keyid-format LONG"
-    dbg "Resetting status file $GNUPGHOME/gpg.status contents"
-    dbg "+ $cmd $@"
-    # Execute gpg command, but filter harmless/annoying warning message for testing consistency
-    $ephemeral_env $cmd "$@" 42<<<"$_KEY_PASSPHRASE" |& \
-        sed -r -e "s/$harmless_warning_rx//g" || true
-    dbg "gpg command exited $?"
-    dbg "gpg status after command:
-$(<$GPG_STATUS_FILEPATH)
-"
-    cache_status_key
-}
-
-# Exit with an error if gpg_cmd() call indicates an error in the status file
-gpg_status_error_die() {
-    local last_status=$(tail -1 "$GPG_STATUS_FILEPATH")
-    if egrep -i -q 'ERROR' "$GPG_STATUS_FILEPATH"; then
-        die "gpg ERROR status found:
-$last_status
-"
-    fi
-}
-
-_verify_key_exists() {
-    local keyid="$1"
-    [[ -n "$keyid" ]] || \
-        die "Expecting a key-id as the first parameter"
-    local output=$(gpg_cmd --list-keys "$keyid" 2>&1)
-    if egrep -i -q 'error reading key' <<<"$output"; then
-        die "Non-existing key '$keyid'; gpg output:
-$output"
-    else
-        gpg_status_error_die
-    fi
-}
-
-# Github signs merge commits using this key, trust it to keep git happy
-trust_github() {
-    dbg "Importing Github's merge-commit signing key"
-    gpg_cmd --import <<<"$GH_PUB_KEY"
-    gpg_status_error_die
-    _verify_key_exists "$GH_PUB_KEY_ID"
-}
-
-set_default_keyid() {
-    local keyid="$1"
-    _verify_key_exists $keyid
-    dbg "Setting default GPG key to ID $keyid"
-    _DEF_KEY_ID="$keyid"
-    _DEF_KEY_ARG="--default-key $keyid"
-}
-
-_get_sec_key_id() {
-    local keyid="$1"
-    local line_re="$2"
-    _verify_key_exists $keyid
-    # Double --with-fingerprint is intentional
-    listing=$(gpg_cmd --fixed-list-mode --with-fingerprint --with-fingerprint --list-secret-keys $keyid)
-    gpg_status_error_die
-    dbg "Searching for key matching regex '$line_re'"
-    awk --field-separator ':' --sandbox -e "/$line_re/"'{print $5}' <<<"$listing"
-}
-
-# Usage-note: The purpose-build sub-keys are preferred to using the main key,
-#             since they are more easily replaced.  This one is not that, it is
-#             simply the ID of the secret part of the primary key (i.e. probably
-#             not what you want to be using on a regular basis).
-get_sec_key_id() {
-    # Format Ref: /usr/share/doc/gnupg2/DETAILS (field 5 is the key ID)
-    # N/B: The 'scESCA' (in any order) near the end is REALLY important, esp. to verify does not have a 'd'
-    _get_sec_key_id "$1" "^sec:${_KEY_COMMON_RX}:[scESCA]+:"
-}
-
-get_enc_key_id() {
-    _get_sec_key_id "$1" "^ssb:${_KEY_COMMON_RX}:e:"
-}
-
-get_sig_key_id() {
-    _get_sec_key_id "$1" "^ssb:${_KEY_COMMON_RX}:s:"
-}
-
-get_auth_key_id() {
-    _get_sec_key_id "$1" "^ssb:${_KEY_COMMON_RX}:a:"
-}
-
-get_key_uid() {
-    local keyid="$1"
-    _verify_key_exists $keyid
-    # Added keys appear in reverse-chronological order, search oldest-first.
-    local keys=$(gpg_cmd --fixed-list-mode --with-fingerprint --with-fingerprint --list-keys $keyid | tac)
-    gpg_status_error_die
-    dbg "Searching for UID subkey in $keyid:"
-    dbg "
-$keys
-"
-    local uid_string
-    # Format Ref: /usr/share/doc/gnupg2/DETAILS (field 10 is the UID string)
-    awk --field-separator ':' --sandbox -e '/uid/{print $10}' <<<"$keys" | \
-        while read uid_string; do
-            dbg "Considering '$uid_string'"
-            if egrep -Eioqm1 "${FULL_NAME_EMAIL_RX}" <<<"$uid_string"; then
-                dbg "It matches regex!"
-                echo "$uid_string"
-                break
-            fi
-        done
-}
-
-git_config_ephemeral() {
-    local args="$@"
-    [[ -n "$args" ]] || \
-        die "Expecting git config arguments as parameters"
-    # Be nice to developers, don't trash their configuration and
-    # also avoid interfering with other system/user configuration
-    dbg "Configuring '$args' in \$GNUPGHOME/gitconfig"
-    git config --file $GNUPGHOME/gitconfig "$@"
-}
-
-configure_git_gpg() {
-    local optional_keyid="$1"  # needed for unit-testing
-    [[ -z "$optional_keyid" ]] ||
-        set_default_keyid "$optional_keyid"
-    # Required for obtaining the UID info and the sig subkey
-    [[ -n "$_DEF_KEY_ID" ]] || \
-        die "No default key has been set, call set_default_keyid() <ID> first."
-    [[ -r "$GIT_UNATTENDED_GPG_TEMPLATE" ]] || \
-        die "Could not read template file '$GIT_UNATTENDED_GPG_TEMPLATE'"
-    local uid_string=$(get_key_uid "$_DEF_KEY_ID")
-    [[ -n "$uid_string" ]] || \
-        die "Expected non-empty uid string using the format:: <full name> <'<'e-mail address'>'>"
-    local email=$(egrep -Eiom1 "$EMAIL_RX" <<<$uid_string)
-    local full_name=$(egrep -Eiom1 "$FULL_NAME_RX" <<<$uid_string | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//')
-
-    dbg "Parsed uid record string into '$full_name' first/last and '$email' email"
-    git_config_ephemeral user.name "$full_name"
-    git_config_ephemeral user.email "$email"
-    git_config_ephemeral user.signingkey $(get_sig_key_id $_DEF_KEY_ID)
-    git_config_ephemeral commit.gpgsign true
-    git_config_ephemeral tag.gpgSign true
-    git_config_ephemeral log.showSignature true
-    # Make active for general use, assuming they have \$HOME set properly
-    ln -sf $GNUPGHOME/gitconfig $GNUPGHOME/.gitconfig
-
-    # Necessary so git doesn't prompt for passwords
-    local unattended_script=$(mktemp -p "$GNUPGHOME" ....XXXX)
-    dbg "Rendering unattended gpg passphrase supply script '$unattended_script'"
-    # Security note: Any git commands will async. call into gpg, possibly
-    # in the future.  Therefor we must provide the passphrase for git's use,
-    # otherwise an interaction would be required.  Relying on the
-    # random script filename and a kernel session keyring with an
-    # obfuscated base64 encoded passphrase is about as good as can be had.
-    local _shit=$'#\a#\a#\a#\a#\a#'
-    local _obfsctd_b64_kp=$(printf '%q' "$_shit")$(base64 -w0 <<<"$_KEY_PASSPHRASE")$(printf '%q' "$_shit")
-    sed -r -e "s/@@@@@ SUBSTITUTION TOKEN @@@@@/${_obfsctd_b64_kp}/" \
-        "$GIT_UNATTENDED_GPG_TEMPLATE" > "$unattended_script"
-    chmod 0700 "$unattended_script"
-    git_config_ephemeral gpg.program "$unattended_script"
-}

@@ -1 +0,0 @@
-git_unattended_gpg.sh.in

@@ -1,16 +0,0 @@
-#!/bin/bash -e
-
-# This is a template for a generated script that feeds a private
-# passphrase from the kernel to gpg, on behalf of an asynchronous
-# call from git.  Not intended to be executed directly.
-
-_obfsctd_b64_kp='@@@@@ SUBSTITUTION TOKEN @@@@@'
-
-# Interpret variables/substitutions at runtime
-(
-    env --unset=_obfsctd_b64_kp \
-        gpg --quiet --batch --no-tty --pinentry-mode loopback \
-            --passphrase-fd 43 \
-            --trust-model tofu+pgp --tofu-default-policy good \
-            "$@"
-) 43<<<$(base64 -d --ignore-garbage <<<"$_obfsctd_b64_kp")

@@ -1,36 +0,0 @@
-#!/bin/bash
-
-# Load standardized test harness
-source $(dirname $(realpath "${BASH_SOURCE[0]}"))/testlib.sh || exit 1
-
-# Must go through the top-level install script that chains to ../.install.sh
-INSTALL_SCRIPT=$(realpath "$TEST_DIR/../../bin/install_automation.sh")
-TEMPDIR=$(mktemp -p "" -d "tmpdir_ephemeral_gpg_XXXXX")
-trap "rm -rf $TEMPDIR" EXIT
-TEST_PRIVATE_KEY_FILEPATH="$TEMPDIR/test_directory_not_file"
-TEST_CMD="AUTOMATION_LIB_PATH=$TEMPDIR/automation/lib $TEMPDIR/automation/bin/ephemeral_gpg.sh"
-unset PRIVATE_KEY_FILEPATH
-
-##### MAIN() #####
-
-test_cmd "Verify ephemeral_gpg can be installed under $TEMPDIR" \
-    0 'Installation complete for.+installed ephemeral_gpg' \
-    env INSTALL_PREFIX=$TEMPDIR $INSTALL_SCRIPT 0.0.0 ephemeral_gpg
-
-test_cmd "Verify executing ephemeral_gpg.sh gives 'Expecting' error message" \
-    2 'ERROR.+Expecting.+empty' \
-    env $TEST_CMD
-
-test_cmd "Verify creation of directory inside temporary install path is successful" \
-    0 "mkdir: created.+$TEST_PRIVATE_KEY_FILEPATH" \
-    mkdir -vp "$TEST_PRIVATE_KEY_FILEPATH"
-
-test_cmd "Verify executing ephemeral_gpg.sh detects \$PRIVATE_GPG_FILEPATH directory" \
-    2 'ERROR.+Expecting.+file' \
-    env PRIVATE_KEY_FILEPATH=$TEST_PRIVATE_KEY_FILEPATH $TEST_CMD
-
-test_cmd "Verify git_unattended_gpg.sh.in installed in library directory" \
-    0 "" \
-    test -r "$TEMPDIR/automation/lib/git_unattended_gpg.sh.in"
-
-exit_with_status

@@ -1,160 +0,0 @@
#!/bin/bash

# Load standardized test harness
source $(dirname $(realpath "${BASH_SOURCE[0]}"))/testlib.sh || exit 1

# Would otherwise get in the way of checking output & removing $TMPDIR
DEBUG=${DEBUG:-0}
SUBJ_FILEPATH="$TEST_DIR/$SUBJ_FILENAME"
export GITREPODIR=$(mktemp -p '' -d 'testbin-ephemeral_gpg_XXXXX.repo')
export PRIVATE_KEY_FILEPATH=$(mktemp -p '' "testbin-ephemeral_gpg_XXXXX.key")
export PRIVATE_PASSPHRASE_FILEPATH=$(mktemp -p '' "testbin-ephemeral_gpg_XXXXX.pass")
trap "rm -rf $GITREPODIR $PRIVATE_KEY_FILEPATH $PRIVATE_PASSPHRASE_FILEPATH" EXIT

TEST_KEY_UID='Fine Oolong <foo@bar.baz>'
TEST_KEY_ID="C71D7CA13828797F29528BA25B786A278A6D48C5"
SIG_KEY_FPR="CBD7A22AD00CB77FD9B8F314A7D41FE6F7FE0989"
TEST_KEY_PASSPHRASE='bin_GdJN-bin_MwPa'
TEST_PRIV_PUB_KEY='
-----BEGIN PGP PRIVATE KEY BLOCK-----

lIYEXu0eiBYJKwYBBAHaRw8BAQdAmmXn0oLorwHlhHiVjs6TXBo8Lo1dsrG0NU1j
WGf01eb+BwMCvev3eznkTMLsp39YX5f1UX12uY7LuDg32Ka6N/maauL5ftlUtuxi
UIW0lP+9l34aqaBN4aTSLppVpSFEbo5EFv3H7NtoxyxholIM6ccdoLQZRmluZSBP
b2xvbmcgPGZvb0BiYXIuYmF6PoiQBBMWCAA4FiEExx18oTgoeX8pUouiW3hqJ4pt
SMUFAl7tHogCGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQW3hqJ4ptSMV7
lgD+MFzKRP/i4tmuLbnE6Xiwb4jxrrtz5pF7blSFPHJhEkEA/juxypMqFVJEgCf1
t3IFJTxh6Lkj9yZZiFjdRHLxD8kInIsEXu0eiBIKKwYBBAGXVQEFAQEHQEEkryan
kgJNY4w5o8dZd7N0g38j8U9qScFbo421hvoZAwEIB/4HAwJ9hWYQX1qmu+wrT6EO
rg5o9H9Mxo3L2LTKfw24eq+t9udUDOKaYXHXzFEmrOAQiPheZq0R4nGVN4Avf31l
A5bxCZZV/vQ0MIrt1W1f8r6NiHgEGBYIACAWIQTHHXyhOCh5fylSi6JbeGonim1I
xQUCXu0eiAIbDAAKCRBbeGonim1IxWCbAQCwTzKCAqza4VWqxX31D6ygIb0+9Otj
zQUZxE5jggDU2QEA/OlbISfm5+2NJGizJW/n+VozyfrAHr/JsmW8qbixAwachgRe
7R6JFgkrBgEEAdpHDwEBB0DvuGjjL4RKGK7DirQwLhpScrFnG6kHPWbVIpj+A4zQ
d/4HAwKly2aim7e1zOy26pXOgBV17gg4FAJ68Ug0uDD5TnkjynmqkWfTuIFvddyz
ByYmtxL4vbd+vgKb2vLxtmXDI5GvXaeBzfzDM8n8j7smYz/diO8EGBYIACAWIQTH
HXyhOCh5fylSi6JbeGonim1IxQUCXu0eiQIbAgCBCRBbeGonim1IxXYgBBkWCAAd
FiEEy9eiKtAMt3/ZuPMUp9Qf5vf+CYkFAl7tHokACgkQp9Qf5vf+CYmOwAD/Uy2j
HLsnhQ/IQYRxdbhW1N93q58gHcn6qlx77k/GojIA/0tFeY3N3NGJQF0V/JlCVSfZ
BJtu+41FD2jdRaWdm+gLuukBAPEzEncXlr02mdzkm6yiJmLm8nTmr0iLAhkNqn2C
Cp1XAP43Bl3JwwigFvgP19ydLCQ9Mqc5DfOVFS9UnFlnGSSeDZyGBF7tHooWCSsG
AQQB2kcPAQEHQIGZympIyDs48GfUyuDjkNcJRoFCLwJoyt6OjpvbzTi1/gcDArMo
IDOeZcFc7OcMNKPNICosTF8jRblG0/UYx/JmH999AGOeU5hPB4FnYLcsv+xLcw6s
SFQC10yCbs6edx8oA7UUkYKbSvbsK+MUBaF5GECIeAQYFggAIBYhBMcdfKE4KHl/
KVKLolt4aieKbUjFBQJe7R6KAhsgAAoJEFt4aieKbUjFrYQA/RNSOCZLckAgUV1G
DcuR1Epmfyymckq4ysCRp3KVnE8tAP9zT6TR/7uhd61X/xaa5ANsWUKDFuPFEp7n
/0ocs8zkApyLBF7tHooSCisGAQQBl1UBBQEBB0COYmfEzCxyCDUR6seA0HaUF9Bc
tBIloo+RTjvt54s+SAMBCAf+BwMCtn0weBQeArTsKOJ9t6yCgJExkrlpvgL1Nfkq
z0vy5StDbu/HuVKOTT2ecCoyclqKuA+S5E78pcJWPMoFBS3Vee6BVaDiRTjVN6kZ
rFXhWIh4BBgWCAAgFiEExx18oTgoeX8pUouiW3hqJ4ptSMUFAl7tHooCGwwACgkQ
W3hqJ4ptSMUccQD7Bd/g1ph10NFnvg6+2OSQgHuA7/HTSHEmH65Qm6WroXoBALq5
QKdFj22bniOLyMcRQi/fsYHiIRxEMec7v3RkR+YF
=BUzJ
-----END PGP PRIVATE KEY BLOCK-----
-----BEGIN PGP PUBLIC KEY BLOCK-----

mDMEXu0eiBYJKwYBBAHaRw8BAQdAmmXn0oLorwHlhHiVjs6TXBo8Lo1dsrG0NU1j
WGf01ea0GUZpbmUgT29sb25nIDxmb29AYmFyLmJhej6IkAQTFggAOBYhBMcdfKE4
KHl/KVKLolt4aieKbUjFBQJe7R6IAhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheA
AAoJEFt4aieKbUjFe5YA/jBcykT/4uLZri25xOl4sG+I8a67c+aRe25UhTxyYRJB
AP47scqTKhVSRIAn9bdyBSU8Yei5I/cmWYhY3URy8Q/JCLg4BF7tHogSCisGAQQB
l1UBBQEBB0BBJK8mp5ICTWOMOaPHWXezdIN/I/FPaknBW6ONtYb6GQMBCAeIeAQY
FggAIBYhBMcdfKE4KHl/KVKLolt4aieKbUjFBQJe7R6IAhsMAAoJEFt4aieKbUjF
YJsBALBPMoICrNrhVarFffUPrKAhvT7062PNBRnETmOCANTZAQD86VshJ+bn7Y0k
aLMlb+f5WjPJ+sAev8myZbypuLEDBrgzBF7tHokWCSsGAQQB2kcPAQEHQO+4aOMv
hEoYrsOKtDAuGlJysWcbqQc9ZtUimP4DjNB3iO8EGBYIACAWIQTHHXyhOCh5fylS
i6JbeGonim1IxQUCXu0eiQIbAgCBCRBbeGonim1IxXYgBBkWCAAdFiEEy9eiKtAM
t3/ZuPMUp9Qf5vf+CYkFAl7tHokACgkQp9Qf5vf+CYmOwAD/Uy2jHLsnhQ/IQYRx
dbhW1N93q58gHcn6qlx77k/GojIA/0tFeY3N3NGJQF0V/JlCVSfZBJtu+41FD2jd
RaWdm+gLuukBAPEzEncXlr02mdzkm6yiJmLm8nTmr0iLAhkNqn2CCp1XAP43Bl3J
wwigFvgP19ydLCQ9Mqc5DfOVFS9UnFlnGSSeDbgzBF7tHooWCSsGAQQB2kcPAQEH
QIGZympIyDs48GfUyuDjkNcJRoFCLwJoyt6OjpvbzTi1iHgEGBYIACAWIQTHHXyh
OCh5fylSi6JbeGonim1IxQUCXu0eigIbIAAKCRBbeGonim1Ixa2EAP0TUjgmS3JA
IFFdRg3LkdRKZn8spnJKuMrAkadylZxPLQD/c0+k0f+7oXetV/8WmuQDbFlCgxbj
xRKe5/9KHLPM5AK4OARe7R6KEgorBgEEAZdVAQUBAQdAjmJnxMwscgg1EerHgNB2
lBfQXLQSJaKPkU477eeLPkgDAQgHiHgEGBYIACAWIQTHHXyhOCh5fylSi6JbeGon
im1IxQUCXu0eigIbDAAKCRBbeGonim1IxRxxAPsF3+DWmHXQ0We+Dr7Y5JCAe4Dv
8dNIcSYfrlCbpauhegEAurlAp0WPbZueI4vIxxFCL9+xgeIhHEQx5zu/dGRH5gWZ
AQ0EWZRoQQEIALNdMO3pmfJpW255kBIHOCcCYrXer1SuByH6wph4iF3KaO4xC1rH
Xk6SVy2atm2qt7mTA9Siwbf8Hb+KS49gSVweAY8vi4vkSbpLkL+ijN+RjOHBGtNJ
DO9iOwTgjfhOjhR0T0oD3vCtCMajPYHHvZYMvJbBy0PcMpC1h4dezBde1fAp+NDs
XLj4F/kg+Hvp0Dw0npS2OMsgOUqp3Etbhq0d7rFYVon5d0tS2wCuhlxcF5YMI9pu
5cIOxwlwslEplVzraA5lde09+geq9Q+nee4sDhiMf1umFusxYx/zXRbHP0lN3e/J
aAao1jTNsGp5mga/5O4TGVEdPwBIYVL1JUMAEQEAAbQ1R2l0SHViICh3ZWItZmxv
dyBjb21taXQgc2lnbmluZykgPG5vcmVwbHlAZ2l0aHViLmNvbT6JASIEEwEIABYF
AlmUaEEJEEruGPg6/esjAhsDAhkBAACZAQf+IBNYWaLajGUGHfACzJI0I1Xgg98M
Mx6HqPzRZPtyChftXvGok7Gt+uo8S2FDeGAcibtvkw9CVSvar5Q8vba38j4RIXr1
vRBYsVIwlsxKT4FeS9KMf2ryMA7T3zr+lKE9XkNTawzGfHgt8t8c2FPONcqiTCz7
ym0ny6Ew0gNd9e7ORkcjGheTes1yMM3lzlf6wrxQFEfT3YEglYI59pC3u/vCKDnh
Frbykz0ZscpdU3GZ0ukIXLO/Iy4KYg1hgAOwLzjxBAUXCd3gRCe6mjpC+ERXHU6P
vAPC+4fl0Ksu4vC3BdpCHSicbjnzenQsaazYx/kX4hyTqj/il46UGhnPjw==
=TnEk
-----END PGP PUBLIC KEY BLOCK-----
'

# These files are intentionally modified during script use.
restore_inputs(){
    # Files may not have write-bit set
    chmod 0600 "$PRIVATE_PASSPHRASE_FILEPATH" || true
    chmod 0600 "$PRIVATE_KEY_FILEPATH" || true
    echo "$TEST_KEY_PASSPHRASE" > "$PRIVATE_PASSPHRASE_FILEPATH"
    echo "$TEST_PRIV_PUB_KEY" > "$PRIVATE_KEY_FILEPATH"
    chmod 0600 "$PRIVATE_PASSPHRASE_FILEPATH"
    chmod 0600 "$PRIVATE_KEY_FILEPATH"
}

rein_test_cmd() {
    restore_inputs
    test_cmd "${@}"
}

##### MAIN() #####

for var_name in PRIVATE_PASSPHRASE_FILEPATH PRIVATE_KEY_FILEPATH; do
    # Assume 3 characters is "too small" and will fail
    echo "foo" > "$PRIVATE_KEY_FILEPATH"
    echo "bar" > "$PRIVATE_PASSPHRASE_FILEPATH"
    test_cmd "Verify expected error when ${!var_name} file is too small" \
        1 "must be larger than" \
        $SUBJ_FILEPATH true
    restore_inputs
    chmod 0000 "${!var_name}"
    test_cmd "Verify \$${var_name} must be writable check" \
        1 "ERROR:.+file.+writable" \
        $SUBJ_FILEPATH true
    restore_inputs
done

for must_have in 'uid:u:.+:Fine' 'sub:.+:A7D41FE6F7FE0989' 'uid:.+:GitHub'; do
    rein_test_cmd "Verify key listing of imported keys contains '$must_have'" \
        0 "$must_have" \
        $SUBJ_FILEPATH gpg --list-keys --quiet --batch --with-colons --keyid-format LONG
done

rein_test_cmd "Confirm can create repository" \
    0 "Initialized empty Git repository" \
    $SUBJ_FILEPATH git init "$GITREPODIR"

BASH_GIT_REPO="set -e; cd $GITREPODIR;"
echo "$RANDOM$RANDOM$RANDOM" > "$GITREPODIR/testfile"

rein_test_cmd "Confirm use of a bash command string for git committing" \
    0 "commit_message.+file changed.+testfile" \
    $SUBJ_FILEPATH bash -c "$BASH_GIT_REPO git add testfile; git commit -sm commit_message 2>&1"

rein_test_cmd "Verify last commit passes signature verification" \
    0 "gpg.+Signature.+$SIG_KEY_FPR.+Good signature.+ultimate.+Author.+Fine" \
    $SUBJ_FILEPATH bash -c "$BASH_GIT_REPO git log -1 HEAD 2>&1"

rein_test_cmd "Confirm a signed tag can be added for HEAD" \
    0 "" \
    $SUBJ_FILEPATH bash -c "$BASH_GIT_REPO git tag -as v0.0.0 -m tag_annotation HEAD 2>&1"

rein_test_cmd "Verify tag can be verified" \
    0 "$SIG_KEY_FPR.+Good signature.+tagger Fine Oolong" \
    $SUBJ_FILEPATH bash -c "$BASH_GIT_REPO git tag --verify v0.0.0 2>&1"

# Files may not have write-bit set
chmod 0600 "$PRIVATE_PASSPHRASE_FILEPATH" || true
chmod 0600 "$PRIVATE_KEY_FILEPATH" || true
exit_with_status
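Editorial note: the deleted test above exercises signing git commits and tags with a disposable keyring. A minimal sketch of that underlying pattern, assuming only stock gpg and git (the key-file path is a hypothetical placeholder, not the ephemeral_gpg.sh interface):

set -euo pipefail
export GNUPGHOME=$(mktemp -d)       # throwaway keyring, destroyed on exit
chmod 0700 "$GNUPGHOME"
trap 'rm -rf "$GNUPGHOME"' EXIT
gpg --batch --import /path/to/test_key.asc   # hypothetical key file
keyid=$(gpg --list-secret-keys --with-colons | awk -F: '/^sec/{print $5; exit}')
git -c user.signingkey="$keyid" -c commit.gpgsign=true \
    commit --allow-empty -m "signed test commit"
git log --show-signature -1                  # expect "Good signature"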
@@ -1,146 +0,0 @@
#!/bin/bash

# Load standardized test harness
source $(dirname $(realpath "${BASH_SOURCE[0]}"))/testlib.sh || exit 1

# Would otherwise get in the way of checking output & removing $TMPDIR
DEBUG=0
source "$TEST_DIR/$SUBJ_FILENAME"

PRIVATE_TEMPDIR=$(mktemp -p '' -d "testlib-ephemeral_gpg_XXXXX")

verify_export_test() {
    test_cmd "Verify status file contains only one exported success message" \
        0 'EXPORTED \w+\s$' \
        grep ' EXPORTED ' $GPG_STATUS_FILEPATH
}

##### MAIN() #####

unset PRIVATE_KEY_FILEPATH
test_cmd "Confirm calling verify_env_vars with no environment gives 'Expecting' error message" \
    2 'ERROR.+Expecting.+empty' \
    verify_env_vars

PRIVATE_KEY_FILEPATH=$(mktemp -p "$PRIVATE_TEMPDIR" "testlib-ephemeral_gpg_XXXXX.asc")
PRIVATE_PASSPHRASE_FILEPATH=$(mktemp -p "$PRIVATE_TEMPDIR" "testlib-ephemeral_gpg_XXXXX.pass")
dd if=/dev/zero "of=$PRIVATE_KEY_FILEPATH" bs=1M count=1 &> /dev/null
dd if=/dev/zero "of=$PRIVATE_PASSPHRASE_FILEPATH" bs=1M count=1 &> /dev/null

test_cmd "Confirm calling verify_env_vars() succeeds with variables set" \
    0 '' \
    verify_env_vars

# Sensitive env. vars are not leaked after go_ephemeral is called
for sensitive_varname in DISPLAY XAUTHORITY DBUS_SESSION_BUS_ADDRESS PINENTRY_USER_DATA; do
    expected_value="testing_${RANDOM}_testing"
    eval "$sensitive_varname=$expected_value"
    export $sensitive_varname
    # Careful: Must also regex match the newline at the end of output
    test_cmd "Confirm that a non-empty value for \$$sensitive_varname is set" \
        0 "^$sensitive_varname=$expected_value\s$" \
        bash -c "echo $sensitive_varname=\$$sensitive_varname"
    go_ephemeral; rm -rf "$GNUPGHOME"; unset GNUPGHOME # normally cleans up on exit
    actual_value="${!sensitive_varname}"
    test_cmd "Confirm that an empty value for \$$sensitive_varname is set" \
        0 "^$sensitive_varname=\s$" \
        bash -c "echo $sensitive_varname=\$$sensitive_varname"
done

test_cmd "Verify gpg_cmd() notices when go_ephemeral() isn't called first" \
    1 "ERROR.+go_ephemeral" \
    gpg_cmd --foo --bar

TEST_PASSPHRASE="testing_${RANDOM}_testing_${RANDOM}"
echo "$TEST_PASSPHRASE" > "$PRIVATE_PASSPHRASE_FILEPATH"

go_ephemeral

test_cmd "Verify \$PRIVATE_PASSPHRASE_FILEPATH file was consumed" \
    0 '' \
    test $(stat --format=%s "$PRIVATE_PASSPHRASE_FILEPATH") -eq 0

test_cmd "Verify print_cached_key warning when cache is empty" \
    0 'WARNING: Empty key cache.+testlib-ephemeral_gpg.sh:[[:digit:]]+' \
    print_cached_key

# Adds encryption and signing subkeys by default
test_cmd "Verify quick key generation command works with gpg_cmd()" \
    0 "" \
    gpg_cmd --quick-generate-key foo@bar.baz default default never

test_cmd "Verify status file contents ends with success message" \
    0 'KEY_CREATED B \w+' \
    tail -1 $GPG_STATUS_FILEPATH

# The test for this function is all the following other tests :D
GPG_KEY_ID=$(print_cached_key)

# These are not added by default
for usage in sign auth; do
    test_cmd "Verify that a $usage subkey can be added" \
        0 "" \
        gpg_cmd --quick-add-key $GPG_KEY_ID default $usage
done

test_cmd "Verify invalid default key id can not be set" \
    1 "ERROR: Non-existing key 'abcd1234'" \
    set_default_keyid "abcd1234"

test_cmd "Verify generated secret key can be exported without console input" \
    0 "" \
    gpg_cmd --export-secret-keys --armor \
        --output "$GNUPGHOME/foo-bar_baz-secret.asc" foo@bar.baz

verify_export_test

test_cmd "Verify an ascii-armor key was exported" \
    0 "" \
    egrep -qi 'BEGIN PGP PRIVATE KEY BLOCK' "$GNUPGHOME/foo-bar_baz-secret.asc"

test_cmd "Verify ID of exported key was cached" \
    0 "[[:alnum:]]{32}" \
    print_cached_key

test_cmd "Verify trust_github() can import public key" \
    0 "" \
    trust_github

# Also confirms can export correct key after importing github
test_cmd "Verify generated public key can be exported without console input" \
    0 "" \
    gpg_cmd --export --armor --output "$GNUPGHOME/foo-bar_baz-public.asc" foo@bar.baz

verify_export_test

test_cmd "Verify valid default key id can be set" \
    0 "" \
    set_default_keyid "$GPG_KEY_ID"

# Long-format key IDs are always 16 hex characters
for kind in sec enc sig auth; do
    test_cmd "Verify $kind key ID can be obtained" \
        0 "[[:alnum:]]{16}" \
        get_${kind}_key_id "$GPG_KEY_ID"
done

test_cmd "Verify git setup fails if uid record doesn't match required e-mail address format" \
    1 "non-empty uid string" \
    configure_git_gpg "$GPG_KEY_ID"

gpg_cmd --command-fd 0 --edit-key "$GPG_KEY_ID" <<<"
adduid
Samuel O. Mebody
somebody@example.com
this is a test comment
save
" > /dev/null # We don't need to see this (most of the time)

test_cmd "Verify git setup uses the last UID found" \
    0 "" \
    configure_git_gpg "$GPG_KEY_ID"

# Cleanup stuff we created
rm -rf "$GNUPGHOME" # Cannot rely on EXIT trap
rm -rf $PRIVATE_TEMPDIR
exit_with_status
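Editorial note: the loop above verifies that go_ephemeral blanks environment variables which could let gpg/pinentry escape the ephemeral session. A minimal sketch of that scrubbing behavior under the same variable list (the function name is an assumption; the real logic lives in the library being tested):

scrub_sensitive_env() {
    local varname
    for varname in DISPLAY XAUTHORITY DBUS_SESSION_BUS_ADDRESS PINENTRY_USER_DATA; do
        # Set empty rather than unset, so "$varname=" still echoes predictably
        printf -v "$varname" '%s' ''
        export "$varname"
    done
}

scrub_sensitive_env
echo "DISPLAY='$DISPLAY'"   # prints DISPLAY=''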
@@ -1,8 +1,11 @@
+#!/bin/bash
+
 # Installs common Github Action utilities system-wide. NOT intended to be used directly
 # by humans, should only be used indirectly by running
 # ../bin/install_automation.sh <ver> github

+set -eo pipefail

 source "$AUTOMATION_LIB_PATH/anchors.sh"
 source "$AUTOMATION_LIB_PATH/console_output.sh"

@@ -21,12 +24,11 @@ if [[ $UID -eq 0 ]]; then
 fi

 cd $(dirname $(realpath "${BASH_SOURCE[0]}"))
-install -v $INST_PERM_ARG -D -t "$INSTALL_PREFIX/bin" ./bin/*
 install -v $INST_PERM_ARG -D -t "$INSTALL_PREFIX/lib" ./lib/*

 # Needed for installer testing
 cat <<EOF>>"./environment"
 # Added on $(date --iso-8601=minutes) by 'github' subcomponent installer
-GITHUB_ACTION_LIB=$INSTALL_PREFIX/lib/github.sh
+export GITHUB_ACTION_LIB=$INSTALL_PREFIX/lib/github.sh
 EOF
 echo "Successfully installed $INSTALL_NAME"
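Editorial note: the heredoc above appends variable definitions to an `environment` file. A hedged sketch of how a consumer might load it (the file's location under $INSTALL_PREFIX is an assumption based on the installer, not documented API):

set -a                       # auto-export anything the file defines
source "$INSTALL_PREFIX/environment"
set +a
source "$GITHUB_ACTION_LIB"  # load the github.sh helpers installed to lib/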
@@ -2,7 +2,7 @@
 # This file is intended for sourcing by the cirrus-ci_retrospective workflow
 # It should not be used under any other context.

-source $(dirname $BASH_SOURCE[0])/github_common.sh || exit 1
+source $(dirname ${BASH_SOURCE[0]})/github_common.sh || exit 1

 # Cirrus-CI Build status codes that represent completion
 COMPLETE_STATUS_RE='FAILED|COMPLETED|ABORTED|ERRORED'

@@ -63,7 +63,7 @@ load_ccir() {
         was_pr='true'
         # Don't race vs another cirrus-ci build triggered _after_ GH action workflow started
         # since both may share the same check_suite. e.g. task re-run or manual-trigger
-        if echo "$bst" | egrep -q "$COMPLETE_STATUS_RE"; then
+        if echo "$bst" | grep -E -q "$COMPLETE_STATUS_RE"; then
             if [[ -n "$tst" ]] && [[ "$tst" == "PAUSED" ]]; then
                 dbg "Detected action status $tst"
                 do_intg='true'
@@ -5,22 +5,50 @@
 # Important paths defined here
 AUTOMATION_LIB_PATH="${AUTOMATION_LIB_PATH:-$(realpath $(dirname ${BASH_SOURCE[0]})/../../common/lib)}"

-# Override default library message prefixes to those consumed by Github Actions
-# https://help.github.com/en/actions/reference/workflow-commands-for-github-actions
-# Doesn't work properly w/o $ACTIONS_STEP_DEBUG=true
-DEBUG_MSG_PREFIX="::debug::"
-# Translation to usage throughout common-library
-if [[ "${ACTIONS_STEP_DEBUG:-false}" == 'true' ]]; then
-    DEBUG=1
-fi
-# Highlight these messages in the Github Action WebUI
-WARNING_MSG_PREFIX="::warning::"
-ERROR_MSG_PREFIX="::error::"

 source $AUTOMATION_LIB_PATH/common_lib.sh || exit 1

+# Wrap the die() function to add github-action sugar that identifies file
+# & line number within the UI, before exiting non-zero.
+rename_function die _die
+die() {
+    # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message
+    local ERROR_MSG_PREFIX
+    ERROR_MSG_PREFIX="::error file=${BASH_SOURCE[1]},line=${BASH_LINENO[0]}::"
+    _die "$@"
+}
+
+# Wrap the warn() function to add github-action sugar that identifies file
+# & line number within the UI.
+rename_function warn _warn
+warn() {
+    local WARNING_MSG_PREFIX
+    # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-a-warning-message
+    WARNING_MSG_PREFIX="::warning file=${BASH_SOURCE[1]},line=${BASH_LINENO[0]}::"
+    _warn "$@"
+}
+
+# Idiomatic debug messages in github-actions are worse than useless. They do
+# not embed file/line information. They are completely hidden unless
+# the $ACTIONS_STEP_DEBUG step or job variable is set 'true'. Setting
+# this variable as a secret can have unintended consequences:
+# https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/using-workflow-run-logs#viewing-logs-to-diagnose-failures
+# Wrap the dbg() function to add github-action sugar at the "notice" level
+# so that it may be observed in output by regular users without danger.
+rename_function dbg _dbg
+dbg() {
+    # When set true, simply enable automation library debugging.
+    if [[ "${ACTIONS_STEP_DEBUG:-false}" == 'true' ]]; then export A_DEBUG=1; fi
+
+    # Notice-level messages actually show up in the UI; use them for debugging.
+    # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-a-notice-message
+    local DEBUG_MSG_PREFIX
+    DEBUG_MSG_PREFIX="::notice file=${BASH_SOURCE[1]},line=${BASH_LINENO[0]}::"
+    _dbg "$@"
+}
+
 # usage: set_out_var <name> [value...]
 set_out_var() {
+    A_DEBUG=0 req_env_vars GITHUB_OUTPUT
     name=$1
     shift
     value="$@"

@@ -28,5 +56,6 @@ set_out_var() {
         die "Expecting first parameter to be non-empty value for the output variable name"
     dbg "Setting Github Action step output variable '$name' to '$value'"
     # Special string recognized by Github Actions
-    printf "\n::set-output name=$name::%s\n" "$value"
+    # Ref: https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter
+    echo "$name=$value" >> $GITHUB_OUTPUT
 }
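Editorial note: the wrappers above depend on `rename_function` from the common library to move the original definition aside. One plausible way such a helper could work is sketched below; treat it as an illustrative assumption, since the real definition lives in common_lib.sh and may differ.

rename_function() {
    local src=$1 dst=$2
    # declare -f prints the function's definition; rewrite the name on its
    # first line and eval the result to define the copy, then drop the original.
    eval "$(declare -f "$src" | sed "1s/^$src/$dst/")"
    unset -f "$src"
}

greet() { echo "hello $1"; }
rename_function greet _greet
greet() { echo "[wrapped]"; _greet "$@"; }   # new wrapper delegates to the copy
greet world    # -> [wrapped] then hello world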
@@ -21,11 +21,11 @@ test_cmd 'Default shell variables are initialized empty/false' \
     0 '^falsefalse$' \
     echo -n "${prn}${tid}${sha}${tst}${was_pr}${do_intg}"

-# Remaining tests all require debuging output to be enabled
-DEBUG=1
+# Remaining tests all require debugging output to be enabled
+A_DEBUG=1

-test_cmd 'The debugging function does not throw any errors and uses special debug output' \
-    0 '::debug::' \
+test_cmd 'The debugging function does not throw any errors and redirects to notice-level output' \
+    0 '::notice' \
     dbg_ccir

 test_cmd "The \$MONITOR_TASK variable is defined and non-empty" \

@@ -91,8 +91,8 @@ for regex in '"id": "10"' $MONITOR_TASK $ACTION_TASK '"branch": "pull/12"' \
     load_ccir "$TESTTEMPDIR"
 done

-# Remaining tests all require debuging output disabled
-DEBUG=0
+# Remaining tests all require debugging output disabled
+A_DEBUG=0

 write_ccir 1 2 3 PAUSED COMPLETED
 load_ccir "$TESTTEMPDIR"
@@ -3,39 +3,61 @@
 source $(dirname $BASH_SOURCE[0])/testlib.sh

 # This is necessary when executing from a Github Action workflow so it ignores
-# all magic output tokens
-echo "::stop-commands::TESTING"
-trap "echo '::TESTING::'" EXIT
-
-test_cmd "The library $TEST_DIR/$SUBJ_FILENAME loads" \
-    0 '' \
-    source $TEST_DIR/$SUBJ_FILENAME
-
-DEBUG=1
-ACTIONS_STEP_DEBUG=true
-# Should update $DEBUG value
-source $TEST_DIR/$SUBJ_FILENAME || exit 1 # can't continue w/o loaded library
-
-test_cmd "The debug message prefix is compatible with github actions commands" \
-    0 '::debug:: This is a test debug message' \
-    dbg 'This is a test debug message'
+# all magic output sugar.
+_MAGICTOKEN="TEST${RANDOM}TEST" # must be randomly generated / unguessable
+echo "::stop-commands::$_MAGICTOKEN"
+trap "echo '::$_MAGICTOKEN::'" EXIT

 unset ACTIONS_STEP_DEBUG
-unset DEBUG
-# Should update $DEBUG value
-source $TEST_DIR/$SUBJ_FILENAME
+unset A_DEBUG
+source $TEST_DIR/$SUBJ_FILENAME || exit 1 # can't continue w/o loaded library

-test_cmd "No debug message shows when ACTIONS_STEP_DEBUG is undefined" \
+test_cmd "No debug message shows when A_DEBUG and ACTIONS_STEP_DEBUG are undefined" \
     0 '' \
     dbg 'This debug message should not appear'

-test_cmd "The warning message prefix is compatible with github actions commands" \
-    0 '::warning:: This is a test warning message' \
+export A_DEBUG=1
+test_cmd "A debug notice message shows when A_DEBUG is true" \
+    0 '::notice file=.+,line=.+:: This is a debug message' \
+    dbg "This is a debug message"
+unset A_DEBUG
+
+export ACTIONS_STEP_DEBUG="true"
+test_cmd "A debug notice message shows when ACTIONS_STEP_DEBUG is true" \
+    0 '::notice file=.+,line=.+:: This is also a debug message' \
+    dbg "This is also a debug message"
+unset ACTIONS_STEP_DEBUG
+unset A_DEBUG
+
+test_cmd "Warning messages contain github-action sugar." \
+    0 '::warning file=.+,line=.+:: This is a test warning message' \
     warn 'This is a test warning message'

-test_cmd "The github actions command for setting output parameter is formatted as expected" \
-    0 '::set-output name=TESTING_NAME::TESTING VALUE' \
+test_cmd "Error messages contain github-action sugar." \
+    0 '::error file=.+,line=.+:: This is a test error message' \
+    die 'This is a test error message' 0
+
+unset GITHUB_OUTPUT_FUDGED
+if [[ -z "$GITHUB_OUTPUT" ]]; then
+    # Not executing under github-actions
+    GITHUB_OUTPUT=$(mktemp -p '' tmp_$(basename ${BASH_SOURCE[0]})_XXXX)
+    GITHUB_OUTPUT_FUDGED=1
+fi
+
+test_cmd "The set_out_var function normally produces no output" \
+    0 '' \
     set_out_var TESTING_NAME TESTING VALUE

-# Must be the last command in this file
+export A_DEBUG=1
+test_cmd "The set_out_var function is debuggable" \
+    0 "::notice file=.+line=.+:: Setting Github.+'DEBUG_TESTING_NAME' to 'DEBUGGING TESTING VALUE'" \
+    set_out_var DEBUG_TESTING_NAME DEBUGGING TESTING VALUE
+unset A_DEBUG
+
+test_cmd "Previous set_out_var function properly sets a step-output value" \
+    0 'TESTING_NAME=TESTING VALUE' \
+    cat $GITHUB_OUTPUT
+
+# Must be the last commands in this file
+if ((GITHUB_OUTPUT_FUDGED)); then rm -f "$GITHUB_OUTPUT"; fi
 exit_with_status
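Editorial note: the random `stop-commands` token in the test above matters because, between `::stop-commands::<token>` and the matching `::<token>::` resume line, the Actions runner treats all `::`-prefixed output as plain text, so test output cannot inject real workflow commands. A minimal standalone illustration (token value is arbitrary):

token="TEST${RANDOM}TEST"
echo "::stop-commands::$token"
echo "::error:: ignored by the runner while commands are stopped"
echo "::$token::"   # resume normal workflow-command processing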
@@ -0,0 +1,5 @@
/Cron.log
/utilization.csv
/dh_status.txt*
/pw_status.txt*
/html/utilization.png*
@@ -0,0 +1,200 @@
#!/bin/bash

# This script is intended for use by humans to allocate a dedicated-host
# and create an instance on it for testing purposes. When executed,
# it will create a temporary clone of the repository with the necessary
# modifications to manipulate the test host. It's the user's responsibility
# to cleanup this directory after manually removing the instance (see below).
#
# **Note**: Due to Apple/Amazon restrictions on the removal of these
# resources, cleanup must be done manually. You will need to shutdown and
# terminate the instance, then wait 24-hours before releasing the
# dedicated-host. The hosts cost money whether or not an instance is running.
#
# The script assumes:
#
# * The current $USER value reflects your actual identity such that
#   the test instance may be labeled appropriately for auditing.
# * The `aws` CLI tool is installed on $PATH.
# * Appropriate `~/.aws/credentials` credentials are setup.
# * The us-east-1 region is selected in `~/.aws/config`.
# * The $POOLTOKEN env. var. is set to value available from
#   https://cirrus-ci.com/pool/1cf8c7f7d7db0b56aecd89759721d2e710778c523a8c91c7c3aaee5b15b48d05
# * The local ssh-agent is able to supply the appropriate private key (stored in BW).

set -eo pipefail

# shellcheck source-path=SCRIPTDIR
source $(dirname ${BASH_SOURCE[0]})/pw_lib.sh

# Support debugging all mac_pw_pool scripts or only this one
I_DEBUG="${I_DEBUG:-0}"
if ((I_DEBUG)); then
    X_DEBUG=1
    warn "Debugging enabled."
fi

dbg "\$USER=$USER"

[[ -n "$USER" ]] || \
    die "The variable \$USER must not be empty"

[[ -n "$POOLTOKEN" ]] || \
    die "The variable \$POOLTOKEN must not be empty"

INST_NAME="${USER}Testing"
LIB_DIRNAME=$(realpath --relative-to=$REPO_DIRPATH $LIB_DIRPATH)
# /tmp is usually a tmpfs, don't let an accidental reboot ruin
# access to a test DH/instance for a developer.
TMP_CLONE_DIRPATH="/var/tmp/${LIB_DIRNAME}_${INST_NAME}"

dbg "\$TMP_CLONE_DIRPATH=$TMP_CLONE_DIRPATH"

if [[ -d "$TMP_CLONE_DIRPATH" ]]; then
    die "Found existing '$TMP_CLONE_DIRPATH', assuming in-use/relevant; if not, manual cleanup is required."
fi

msg "Creating temporary clone dir and transferring any uncommitted files."

git clone --no-local --no-hardlinks --depth 1 --single-branch --no-tags --quiet "file://$REPO_DIRPATH" "$TMP_CLONE_DIRPATH"
declare -a uncommited_filepaths
readarray -t uncommited_filepaths <<<$(
    pushd "$REPO_DIRPATH" &> /dev/null
    # Obtaining uncommitted relative staged filepaths
    git diff --name-only HEAD
    # Obtaining uncommitted relative unstaged filepaths
    git ls-files . --exclude-standard --others
    popd &> /dev/null
)

dbg "Copying \$uncommited_filepaths[*]=${uncommited_filepaths[*]}"

for uncommited_file in "${uncommited_filepaths[@]}"; do
    uncommited_file_src="$REPO_DIRPATH/$uncommited_file"
    uncommited_file_dest="$TMP_CLONE_DIRPATH/$uncommited_file"
    uncommited_file_dest_parent=$(dirname "$uncommited_file_dest")
    #dbg "Working on uncommited file '$uncommited_file_src'"
    if [[ -r "$uncommited_file_src" ]]; then
        mkdir -p "$uncommited_file_dest_parent"
        #dbg "$uncommited_file_src -> $uncommited_file_dest"
        cp -a "$uncommited_file_src" "$uncommited_file_dest"
    fi
done

declare -a modargs
# Format: <pw_lib.sh var name> <new value> <old value>
modargs=(
    # Necessary to prevent in-production macs from trying to use testing instance
    "DH_REQ_VAL $INST_NAME $DH_REQ_VAL"
    # Necessary to make test dedicated host stand out when auditing the set in the console
    "DH_PFX $INST_NAME $DH_PFX"
    # The default launch template name includes $DH_PFX, ensure the production template name is used.
    # N/B: The old/unmodified pw_lib.sh is still loaded for the running script
    "TEMPLATE_NAME $TEMPLATE_NAME Cirrus${DH_PFX}PWinstance"
    # Permit developer to use instance for up to 3 days max (orphan vm cleaning process will nail it after that).
    "PW_MAX_HOURS 72 $PW_MAX_HOURS"
    # Permit developer to execute as many Cirrus-CI tasks as they want w/o automatic shutdown.
    "PW_MAX_TASKS 9999 $PW_MAX_TASKS"
)

for modarg in "${modargs[@]}"; do
    set -- $modarg # Convert the "tuple" into the param args $1 $2...
    dbg "Modifying pw_lib.sh \$$1 definition to '$2' (was '$3')"
    sed -i -r -e "s/^$1=.*/$1=\"$2\"/" "$TMP_CLONE_DIRPATH/$LIB_DIRNAME/pw_lib.sh"
    # Ensure future script invocations use the new values
    unset $1
done

cd "$TMP_CLONE_DIRPATH/$LIB_DIRNAME"
source ./pw_lib.sh

# Before going any further, make sure there isn't an existing
# dedicated-host named ${INST_NAME}-0. If there is, it can
# be re-used instead of failing the script outright.
existing_dh_json=$(mktemp -p "." dh_allocate_XXXXX.json)
$AWS ec2 describe-hosts --filter "Name=tag:Name,Values=${INST_NAME}-0" --query 'Hosts[].HostId' > "$existing_dh_json"
if grep -Fqx '[]' "$existing_dh_json"; then

    msg "Creating the dedicated host '${INST_NAME}-0'"
    declare dh_allocate_json
    dh_allocate_json=$(mktemp -p "." dh_allocate_XXXXX.json)

    declare -a awsargs
    # Word-splitting of $AWS is desirable
    # shellcheck disable=SC2206
    awsargs=(
        $AWS
        ec2 allocate-hosts
        --availability-zone us-east-1a
        --instance-type mac2.metal
        --auto-placement off
        --host-recovery off
        --host-maintenance off
        --quantity 1
        --tag-specifications
        "ResourceType=dedicated-host,Tags=[{Key=Name,Value=${INST_NAME}-0},{Key=$DH_REQ_TAG,Value=$DH_REQ_VAL},{Key=PWPoolReady,Value=true},{Key=automation,Value=false}]"
    )

    # N/B: Apple/Amazon require min allocation time of 24hours!
    dbg "Executing: ${awsargs[*]}"
    "${awsargs[@]}" > "$dh_allocate_json" || \
        die "Provisioning new dedicated host $INST_NAME failed. Manual debugging & cleanup required."

    dbg $(jq . "$dh_allocate_json")
    dhid=$(jq -r -e '.HostIds[0]' "$dh_allocate_json")
    [[ -n "$dhid" ]] || \
        die "Failed obtaining DH ID of new host. Manual debugging & cleanup required."

    # There's a small delay between allocating the dedicated host and LaunchInstances.sh
    # being able to interact with it. There's no sensible way to monitor for this state :(
    sleep 3s
else # A dedicated host already exists
    dhid=$(jq -r -e '.[0]' "$existing_dh_json")
fi

# Normally allocation is fairly instant, but not always. Confirm we're able to actually
# launch a mac instance onto the dedicated host.
for ((attempt=1 ; attempt < 11 ; attempt++)); do
    msg "Attempt #$attempt launching a new instance on dedicated host"
    ./LaunchInstances.sh --force
    if grep -E "^${INST_NAME}-0 i-" dh_status.txt; then
        attempt=-1 # signal success
        break
    fi
    sleep 1s
done

[[ "$attempt" -eq -1 ]] || \
    die "Failed to use LaunchInstances.sh. Manual debugging & cleanup required."

# At this point the script could call SetupInstances.sh in another loop
# but it takes about 20-minutes to complete. Also, the developer may
# not need it, they may simply want to ssh into the instance to poke
# around. i.e. they don't need to run any Cirrus-CI jobs on the test
# instance.
warn "---"
warn "NOT copying/running setup.sh to new instance (in case manual activities are desired)."
warn "---"

w="PLEASE REMEMBER TO terminate instance, wait two hours, then
remove the dedicated-host in the web console, or run
'aws ec2 release-hosts --host-ids=$dhid'."

msg "---"
msg "Dropping you into a shell inside a temp. repo clone:
($TMP_CLONE_DIRPATH/$LIB_DIRNAME)"
msg "---"
msg "Once it finishes booting (5m), you may use './InstanceSSH.sh ${INST_NAME}-0'
to access it. Otherwise to fully setup the instance for Cirrus-CI, you need
to execute './SetupInstances.sh' repeatedly until the ${INST_NAME}-0 line in
'pw_status.txt' includes the text 'complete alive'. That process can take 20+
minutes. Once alive, you may then use Cirrus-CI to test against this specific
instance with any 'persistent_worker' task having a label of
'$DH_REQ_TAG=$DH_REQ_VAL' set."
msg "---"
warn "$w"

export POOLTOKEN # ensure availability in sub-shell
bash -l

warn "$w"
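Editorial note: a hedged sketch of the manual cleanup the script's comments require; the instance and host IDs below are placeholders. `release-hosts` only succeeds after the mandatory minimum allocation period has elapsed and no instance remains on the host.

aws ec2 terminate-instances --instance-ids i-0123456789abcdef0
# ...wait for termination plus the mandatory allocation period...
aws ec2 describe-hosts --host-ids h-0123456789abcdef0 \
    --query 'Hosts[].State'    # expect "available" before releasing
aws ec2 release-hosts --host-ids h-0123456789abcdef0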
@@ -0,0 +1,70 @@
#!/bin/bash

# Intended to be run from $HOME/devel/automation/mac_pw_pool/
# using a crontab like:

# # Every date/timestamp in PW Pool management is UTC-relative
# # make cron do the same for consistency.
# CRON_TZ=UTC
#
# PATH=/home/shared/.local/bin:/home/shared/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin
#
# # Keep log from filling up disk & make sure webserver is running
# # (5am UTC is during CI-activity lull)
# 59 4 * * * $HOME/devel/automation/mac_pw_pool/nightly_maintenance.sh &>> $CRONLOG
#
# # PW Pool management (usage drop-off from 03:00-15:00 UTC)
# POOLTOKEN=<from https://cirrus-ci.com/pool/1cf8c7f7d7db0b56aecd89759721d2e710778c523a8c91c7c3aaee5b15b48d05>
# CRONLOG=/home/shared/devel/automation/mac_pw_pool/Cron.log
# */5 * * * * /home/shared/devel/automation/mac_pw_pool/Cron.sh &>> $CRONLOG

# shellcheck disable=SC2154
[ "${FLOCKER}" != "$0" ] && exec env FLOCKER="$0" flock -e -w 300 "$0" "$0" "$@" || :

# shellcheck source=./pw_lib.sh
source $(dirname "${BASH_SOURCE[0]}")/pw_lib.sh

cd $SCRIPT_DIRPATH || die "Cannot enter '$SCRIPT_DIRPATH'"

# SSH agent required to provide key for accessing workers
# Started with `ssh-agent -s > /run/user/$UID/ssh-agent.env`
# followed by adding/unlocking the necessary keys.
# shellcheck disable=SC1090
source /run/user/$UID/ssh-agent.env

date -u -Iminutes
now_minutes=$(date -u +%M)

if (($now_minutes%10==0)); then
    $SCRIPT_DIRPATH/LaunchInstances.sh
    echo "Exit: $?"
fi

$SCRIPT_DIRPATH/SetupInstances.sh
echo "Exit: $?"

[[ -r "$PWSTATE" ]] || \
    die "Can't read $PWSTATE to generate utilization data."

uzn_file="$SCRIPT_DIRPATH/utilization.csv"
# Run input through `date` to validate values are usable timestamps
timestamp=$(date -u -Iseconds -d \
    $(grep -E '^# SetupInstances\.sh run ' "$PWSTATE" | \
      awk '{print $4}'))
pw_state=$(grep -E -v '^($|#+| +)' "$PWSTATE")
n_workers=$(grep 'complete alive' <<<"$pw_state" | wc -l)
n_tasks=$(awk "BEGIN{B=0} /${DH_PFX}-[0-9]+ complete alive/{B+=\$4} END{print B}" <<<"$pw_state")
n_taskf=$(awk "BEGIN{E=0} /${DH_PFX}-[0-9]+ complete alive/{E+=\$5} END{print E}" <<<"$pw_state")
printf "%s,%i,%i,%i\n" "$timestamp" "$n_workers" "$n_tasks" "$n_taskf" | tee -a "$uzn_file"

# Prevent uncontrolled growth of utilization.csv. Assume this script
# runs every $interval minutes, keep only $history_hours worth of data.
interval_minutes=5
history_hours=36
lines_per_hour=$((60/$interval_minutes))
max_uzn_lines=$(($history_hours * $lines_per_hour))
tail -n $max_uzn_lines "$uzn_file" > "${uzn_file}.tmp"
mv "${uzn_file}.tmp" "$uzn_file"

# If possible, generate the webpage utilization graph
gnuplot -c Utilization.gnuplot || true
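Editorial note: the `FLOCKER` line near the top of Cron.sh is the self-locking idiom from the flock(1) man page. A minimal standalone version, for illustration: the script re-executes itself under an exclusive lock on its own file, so overlapping cron runs wait (here up to 300 seconds) instead of interleaving.

#!/bin/bash
[ "${FLOCKER:-}" != "$0" ] && exec env FLOCKER="$0" flock -e -w 300 "$0" "$0" "$@" || :

echo "only one copy of this script body runs at a time (PID $$)"
sleep 30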
@@ -0,0 +1,39 @@
#!/bin/bash

set -eo pipefail

# Helper for humans to access an existing instance. It depends on:
#
# * You know the instance-id or name.
# * All requirements listed in the top `LaunchInstances.sh` comment.
# * The local ssh-agent is able to supply the appropriate private key.

# shellcheck source-path=SCRIPTDIR
source $(dirname ${BASH_SOURCE[0]})/pw_lib.sh

SSH="ssh $SSH_ARGS" # N/B: library default nulls stdin
if nc -z localhost 5900; then
    # Enable access to VNC if it's running
    # ref: https://repost.aws/knowledge-center/ec2-mac-instance-gui-access
    SSH+=" -L 5900:localhost:5900"
fi

[[ -n "$1" ]] || \
    die "Must provide an EC2 instance ID or Name tag value as the first argument"

case "$1" in
    i-*)
        inst_json=$($AWS ec2 describe-instances --instance-ids "$1") ;;
    *)
        inst_json=$($AWS ec2 describe-instances --filter "Name=tag:Name,Values=$1") ;;
esac

inst_desc="$1" # preserve the identifier; $1 changes after the shift below
shift

pub_dns=$(jq -r -e '.Reservations?[0]?.Instances?[0]?.PublicDnsName?' <<<"$inst_json")
if [[ -z "$pub_dns" ]] || [[ "$pub_dns" == "null" ]]; then
    die "Instance '$inst_desc' does not exist, or does not have a public DNS address allocated (yet)."
fi

echo "+ $SSH ec2-user@$pub_dns $*" >> /dev/stderr
exec $SSH ec2-user@$pub_dns "$@"
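Editorial note: example invocations of the helper above (all IDs and names are placeholders): the first opens an interactive shell by Name tag, the second runs a single remote command by instance ID.

./InstanceSSH.sh MyUserTesting-0
./InstanceSSH.sh i-0123456789abcdef0 uname -a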