Compare commits
97 Commits
Author | SHA1 | Date |
---|---|---|
 | 0fd4e55fd5 | |
 | a3a8b7ca23 | |
 | 85bc9560a1 | |
 | 77fea64100 | |
 | 24379a4cf5 | |
 | c3d53a443e | |
 | 3a8c5f9fb6 | |
 | c97384262b | |
 | 3007fdba71 | |
 | d34c718e26 | |
 | 5d4ff56c0c | |
 | 010949a925 | |
 | 1368c96bae | |
 | f38fe91d46 | |
 | c4c86486cc | |
 | f92036c156 | |
 | ca381f217a | |
 | f044c7e10c | |
 | 058de18c20 | |
 | 203eea8d5d | |
 | 9b99805024 | |
 | 365534ebfe | |
 | 3c624222bc | |
 | 2f4b14f8ee | |
 | 7e834d3cbe | |
 | 56ebf6c1ea | |
 | 7e88fed72c | |
 | 371ecb8ae6 | |
 | fd8bfcdadd | |
 | e472ae020d | |
 | a6ca81cec2 | |
 | fac45dd5ba | |
 | c150b07f29 | |
 | bbbe75813f | |
 | 61f7725152 | |
 | f917e0a36c | |
 | 527971f55e | |
 | bfc70e666f | |
 | 57a91849d8 | |
 | 7235308e04 | |
 | daa48374b1 | |
 | e8849831d2 | |
 | 7054b46daf | |
 | 34a7f0f385 | |
 | 108d9f3ad3 | |
 | b536f24818 | |
 | ca08bb1e74 | |
 | 6ae1b9d55a | |
 | e75a2d3a54 | |
 | 9f56d1c8ae | |
 | cec8a83ecb | |
 | f3b8f1d982 | |
 | c4aad1b75e | |
 | 9d48125c8e | |
 | e46c204450 | |
 | 356fd1fffa | |
 | c59c8dd581 | |
 | adb33a4306 | |
 | b4abd3ebfc | |
 | f8c799c213 | |
 | 97f1f0ab32 | |
 | d08229681f | |
 | e0b5208767 | |
 | d02e7e5ff5 | |
 | 16257d564e | |
 | 44abffd4fe | |
 | c3faa3e042 | |
 | 8209b3e0c1 | |
 | 02d20ceadc | |
 | d99aca43af | |
 | e95f7ed7e2 | |
 | 137a409756 | |
 | a75e8c2b84 | |
 | db1d6ed410 | |
 | 8ec81ffede | |
 | 1510ab7921 | |
 | cbd660df67 | |
 | e08da695c5 | |
 | 5dacf8b1c9 | |
 | 7e307b2a2d | |
 | 5cb5f3796c | |
 | 8db8d12e9c | |
 | 3556188cd3 | |
 | 02352b0772 | |
 | 913eaa1189 | |
 | 12ef0a82f0 | |
 | d406f55264 | |
 | 672397c2d9 | |
 | 091f6f9fc4 | |
 | 2a29132efa | |
 | 623a539c7c | |
 | 49d8827a42 | |
 | 0b9dcf3fbf | |
 | ee13b44943 | |
 | 068e23330f | |
 | 23a0845b5e | |
 | 4f843ad11c | |
.cirrus.yml

@@ -1,124 +0,0 @@
---

env:
    DEST_BRANCH: "main"
    GOPATH: "/var/tmp/go"
    GOBIN: "${GOPATH}/bin"
    GOCACHE: "${GOPATH}/cache"
    GOSRC: "${GOPATH}/src/github.com/containers/podman"
    CIRRUS_WORKING_DIR: "${GOPATH}/src/github.com/containers/podman-py"
    SCRIPT_BASE: "./contrib/cirrus"
    CIRRUS_SHELL: "/bin/bash"
    HOME: "/root"  # not set by default

    ####
    #### Cache-image names to test with (double-quotes around names are critical)
    ####
    # Google-cloud VM Images
    IMAGE_SUFFIX: "c20250131t121915z-f41f40d13"
    FEDORA_CACHE_IMAGE_NAME: "fedora-podman-py-${IMAGE_SUFFIX}"


gcp_credentials: ENCRYPTED[0c639039cdd3a9a93fac7746ea1bf366d432e5ff3303bf293e64a7ff38dee85fd445f71625fa5626dc438be2b8efe939]


# Default VM to use unless set or modified by task
gce_instance:
    image_project: "libpod-218412"
    zone: "us-central1-c"  # Required by Cirrus for the time being
    cpu: 2
    memory: "4Gb"
    disk: 200  # Required for performance reasons
    image_name: "${FEDORA_CACHE_IMAGE_NAME}"

gating_task:
    name: "Gating test"
    alias: gating

    # Only run this on PRs, never during post-merge testing. This is also required
    # for proper setting of EPOCH_TEST_COMMIT value, required by validation tools.
    only_if: $CIRRUS_PR != ""

    timeout_in: 20m

    env:
        PATH: ${PATH}:${GOPATH}/bin

    script:
        - make
        - make lint

test_task:
    name: "Test on Fedora"
    alias: test

    depends_on:
        - gating

    script:
        - ${SCRIPT_BASE}/enable_ssh.sh
        - ${SCRIPT_BASE}/build_podman.sh
        - ${SCRIPT_BASE}/enable_podman.sh
        - ${SCRIPT_BASE}/test.sh

latest_task:
    name: "Test Podman main on Fedora"
    alias: latest
    allow_failures: true

    depends_on:
        - gating

    env:
        PATH: ${PATH}:${GOPATH}/bin

    script:
        - ${SCRIPT_BASE}/enable_ssh.sh
        - ${SCRIPT_BASE}/build_podman.sh
        - ${SCRIPT_BASE}/enable_podman.sh
        - ${SCRIPT_BASE}/test.sh

# This task is critical. It updates the "last-used by" timestamp stored
# in metadata for all VM images. This mechanism functions in tandem with
# an out-of-band pruning operation to remove disused VM images.
meta_task:
    alias: meta
    name: "VM img. keepalive"

    container: &smallcontainer
        image: "quay.io/libpod/imgts:latest"
        cpu: 1
        memory: 1

    env:
        IMGNAMES: ${FEDORA_CACHE_IMAGE_NAME}
        BUILDID: "${CIRRUS_BUILD_ID}"
        REPOREF: "${CIRRUS_REPO_NAME}"
        GCPJSON: ENCRYPTED[e8a53772eff6e86bf6b99107b6e6ee3216e2ca00c36252ae3bd8cb29d9b903ffb2e1a1322ea810ca251b04f833b8f8d9]
        GCPNAME: ENCRYPTED[fb878daf188d35c2ed356dc777267d99b59863ff3abf0c41199d562fca50ba0668fdb0d87e109c9eaa2a635d2825feed]
        GCPPROJECT: "libpod-218412"

    clone_script: &noop mkdir -p $CIRRUS_WORKING_DIR
    script: /usr/local/bin/entrypoint.sh

# Status aggregator for all tests. This task simply ensures a defined
# set of tasks all passed, and allows confirming that based on the status
# of this task.
success_task:
    name: "Total Success"
    alias: success

    # N/B: ALL tasks must be listed here, minus their '_task' suffix.
    depends_on:
        - meta
        - gating
        - test
        - latest
    container:
        image: quay.io/libpod/alpine:latest
        cpu: 1
        memory: 1
    env:
        CIRRUS_SHELL: "/bin/sh"
    clone_script: *noop
    script: *noop
@@ -9,8 +9,8 @@ jobs:
    env:
      SKIP: no-commit-to-branch
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - uses: actions/checkout@v5
      - uses: actions/setup-python@v6
        with:
          python-version: |
            3.9

@@ -11,9 +11,9 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v5
      - name: Set up Python
        uses: actions/setup-python@v5
        uses: actions/setup-python@v6
        with:
          python-version: "3.x"

@@ -46,7 +46,7 @@ jobs:

    steps:
      - name: Download all the dists
        uses: actions/download-artifact@v4
        uses: actions/download-artifact@v5
        with:
          name: python-package-distributions
          path: dist/

@@ -68,12 +68,12 @@ jobs:

    steps:
      - name: Download all the dists
        uses: actions/download-artifact@v4
        uses: actions/download-artifact@v5
        with:
          name: python-package-distributions
          path: dist/
      - name: Sign the dists with Sigstore
        uses: sigstore/gh-action-sigstore-python@v3.0.0
        uses: sigstore/gh-action-sigstore-python@v3.0.1
        with:
          inputs: >-
            ./dist/*.tar.gz

@@ -114,7 +114,7 @@ jobs:

    steps:
      - name: Download all the dists
        uses: actions/download-artifact@v4
        uses: actions/download-artifact@v5
        with:
          name: python-package-distributions
          path: dist/

@@ -122,3 +122,5 @@ jobs:
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          repository-url: https://test.pypi.org/legacy/
          skip_existing: true
          verbose: true
.packit.yaml

@@ -4,6 +4,26 @@
upstream_tag_template: v{version}

files_to_sync:
  - src: rpm/gating.yml
    dest: gating.yml
    delete: true
  - src: pyproject.toml
    dest: pyproject.toml
    delete: true
  - src: plans/
    dest: plans/
    delete: true
    mkpath: true
  - src: tests/
    dest: tests/
    delete: true
    mkpath: true
  - src: .fmf/
    dest: .fmf/
    delete: true
    mkpath: true

packages:
  python-podman-fedora:
    pkg_tool: fedpkg

@@ -82,29 +102,63 @@ jobs:
    dist_git_branches:
      - fedora-branched # rawhide updates are created automatically


  # Test linting on the codebase
  # This test might break based on the OS and lint used, so we follow fedora-latest as a reference
  - job: tests
    trigger: pull_request
    identifier: upstream-sanity
    tmt_plan: /upstream/sanity
    identifier: distro-sanity
    tmt_plan: /distro/sanity
    packages: [python-podman-fedora]
    targets:
      - fedora-latest-stable
    skip_build: true


  # test unit test coverage
  - job: tests
    trigger: pull_request
    identifier: upstream-all-fedora
    tmt_plan: /upstream/all
    identifier: unittest-coverage
    tmt_plan: /distro/unittest_coverage
    packages: [python-podman-fedora]
    targets:
      - fedora-latest-stable
    skip_build: true


  # TODO: test integration test coverage

  # run all tests for all python versions on all fedoras
  - job: tests
    trigger: pull_request
    identifier: distro-fedora-all
    tmt_plan: /distro/all_python
    packages: [python-podman-fedora]
    targets:
      - fedora-all

  # run tests for the rawhide python version using podman-next packages
  - job: tests
    trigger: pull_request
    identifier: upstream-base-centos
    tmt_plan: /upstream/base
    identifier: podman-next-fedora-base
    tmt_plan: /pnext/base_python
    packages: [python-podman-fedora]
    targets:
      - fedora-rawhide
    tf_extra_params:
      environments:
        - artifacts:
            - type: repository-file
              id: https://copr.fedorainfracloud.org/coprs/rhcontainerbot/podman-next/repo/fedora-$releasever/rhcontainerbot-podman-next-fedora-$releasever.repo
    manual_trigger: true
    labels:
      - pnext
      - podman-next

  - job: tests
    trigger: pull_request
    identifier: distro-centos-base
    tmt_plan: /distro/base_python
    packages: [python-podman-centos]
    targets:
      - centos-stream-9

@@ -112,8 +166,8 @@ jobs:

  - job: tests
    trigger: pull_request
    identifier: upstream-base-rhel
    tmt_plan: /upstream/base
    identifier: distro-rhel-base
    tmt_plan: /distro/base_python
    packages: [python-podman-rhel]
    targets:
      - epel-9
@@ -1,13 +1,14 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.3.0
    rev: v5.0.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: check-yaml
        exclude: "gating.yml"
      - id: end-of-file-fixer
      - id: trailing-whitespace
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.8.1
    rev: v0.12.8
    hooks:
      # Run the linter.
      - id: ruff

@@ -18,3 +19,9 @@ repos:
    rev: 1.39.0
    hooks:
      - id: tmt-lint
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: v1.15.0
    hooks:
      - id: mypy
        pass_filenames: false
        args: ["--package", "podman"]
@@ -21,7 +21,10 @@ build:
# https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
  install:
    - requirements: requirements.txt
    - method: pip
      path: .
      extra_requirements:
        - docs

# Build documentation in the docs/ directory with Sphinx
sphinx:
@@ -45,6 +45,45 @@ pip install tox
tox -e coverage
```

#### Advanced testing

Always prefer to run `tox` directly, even when you want to run a specific test or scenario.
Instead of running `pytest` directly, you should run:

```
tox -e py -- podman/tests/integration/test_container_create.py -k test_container_directory_volume_mount
```

If you'd like to test against a specific `tox` environment you can do:

```
tox -e py312 -- podman/tests/integration/test_container_create.py -k test_container_directory_volume_mount
```

Pass pytest options after `--`.

#### Testing future features

Since `podman-py` follows stable releases of `podman`, tests are meant to run against the
libpod versions commonly shipped in distributions. Tests can be versioned, but preferably
they should not be. Occasionally, upstream can diverge and have features that are not
included in a specific version of libpod, or that will be included eventually.
To run a test against such changes, you need to have
[podman-next](https://copr.fedorainfracloud.org/coprs/rhcontainerbot/podman-next) installed.
Then, you need to mark the test as `@pytest.mark.pnext`. Marked tests will be excluded from the
runs, unless you pass `--pnext` as a CLI option.
Preferably, this should be a rare case and it's better to use this marker as a temporary solution,
with the goal of removing the marker within a few PRs.

To run these tests use:

```
tox -e py -- --pnext -m pnext podman/tests/integration/test_container_create.py -k test_container_mounts_without_rw_as_default
```

The option `--pnext` **enables** the tests with the `pnext` pytest marker, and `-m pnext` will run
the marked tests **only**.
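For illustration, a pnext-gated test might look like the following sketch (the test body, image, and socket path are hypothetical; only the marker and CLI flags come from the workflow described above):

```
import pytest
from podman import PodmanClient


@pytest.mark.pnext  # excluded from normal runs; selected with --pnext -m pnext
def test_upcoming_podman_feature():
    # Placeholder check standing in for a feature that only exists in podman-next builds.
    with PodmanClient(base_url="unix:///run/podman/podman.sock") as client:
        assert client.containers.list() is not None
```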
## Submitting changes

- Create a GitHub pull request (PR)
Makefile

@@ -8,7 +8,7 @@ DESTDIR ?=
EPOCH_TEST_COMMIT ?= $(shell git merge-base $${DEST_BRANCH:-main} HEAD)
HEAD ?= HEAD

export PODMAN_VERSION ?= "5.4.0"
export PODMAN_VERSION ?= "5.6.0"

.PHONY: podman
podman:

@@ -19,17 +19,23 @@ podman:

.PHONY: lint
lint: tox
	$(PYTHON) -m tox -e format,lint
	$(PYTHON) -m tox -e format,lint,mypy

.PHONY: tests
tests: tox
	# see tox.ini for environment variable settings
	$(PYTHON) -m tox -e coverage,py39,py310,py311,py312,py313

.PHONY: tests-ci-base-python-podman-next
tests-ci-base-python-podman-next:
	$(PYTHON) -m tox -e py -- --pnext -m pnext

.PHONY: tests-ci-base-python
tests-ci-base-python:
	$(PYTHON) -m tox -e coverage,py

# TODO: coverage is probably not necessary here and in tests-ci-base-python
# but for now it's ok to leave it here so it's run
.PHONY: tests-ci-all-python
tests-ci-all-python:
	$(PYTHON) -m tox -e coverage,py39,py310,py311,py312,py313
OWNERS

@@ -12,6 +12,7 @@ approvers:
reviewers:
  - ashley-cui
  - baude
  - Honny1
  - rhatdan
  - TomSweeneyRedHat
  - Edward5hen
@@ -1,5 +1,5 @@
# podman-py
[](https://cirrus-ci.com/github/containers/podman-py/main)
[](https://pypi.org/project/podman/)

This python package is a library of bindings to use the RESTful API of [Podman](https://github.com/containers/podman).
It is currently under development and contributors are welcome!
@@ -1,9 +0,0 @@
#!/bin/bash

set -xeo pipefail

systemctl stop podman.socket || :

dnf remove podman -y
dnf copr enable rhcontainerbot/podman-next -y
dnf install podman -y

@@ -1,11 +0,0 @@
#!/bin/bash

set -eo pipefail

systemctl enable podman.socket podman.service
systemctl start podman.socket
systemctl status podman.socket ||:

# log which version of podman we just enabled
echo "Locate podman: $(type -P podman)"
podman --version

@@ -1,11 +0,0 @@
#!/bin/bash

set -eo pipefail

systemctl enable sshd
systemctl start sshd
systemctl status sshd ||:

ssh-keygen -t ecdsa -b 521 -f /root/.ssh/id_ecdsa -P ""
cp /root/.ssh/authorized_keys /root/.ssh/authorized_keys%
cat /root/.ssh/id_ecdsa.pub >>/root/.ssh/authorized_keys

@@ -1,5 +0,0 @@
#!/bin/bash

set -eo pipefail

make tests
@@ -0,0 +1,10 @@
---
!Policy
product_versions:
  - fedora-*
decision_contexts:
  - bodhi_update_push_stable
  - bodhi_update_push_testing
subject_type: koji_build
rules:
  - !PassingTestCaseRule {test_case_name: fedora-ci.koji-build./plans/downstream/all.functional}
@@ -1,61 +0,0 @@
#!/usr/bin/env bash

#
# For help and usage information, simply execute the script w/o any arguments.
#
# This script is intended to be run by Red Hat podman-py developers who need
# to debug problems specifically related to Cirrus-CI automated testing.
# It requires that you have been granted prior access to create VMs in
# google-cloud. For non-Red Hat contributors, VMs are available as-needed,
# with supervision upon request.

set -e

SCRIPT_FILEPATH=$(realpath "${BASH_SOURCE[0]}")
SCRIPT_DIRPATH=$(dirname "$SCRIPT_FILEPATH")
REPO_DIRPATH=$(realpath "$SCRIPT_DIRPATH/../")

# Help detect if we were called by get_ci_vm container
GET_CI_VM="${GET_CI_VM:-0}"
in_get_ci_vm() {
    if ((GET_CI_VM==0)); then
        echo "Error: $1 is not intended for use in this context"
        exit 2
    fi
}

# get_ci_vm APIv1 container entrypoint calls into this script
# to obtain required repo. specific configuration options.
if [[ "$1" == "--config" ]]; then
    in_get_ci_vm "$1"
    cat <<EOF
DESTDIR="/var/tmp/go/src/github.com/containers/podman-py"
UPSTREAM_REPO="https://github.com/containers/podman-py.git"
CI_ENVFILE="/etc/ci_environment"
GCLOUD_PROJECT="podman-py"
GCLOUD_IMGPROJECT="libpod-218412"
GCLOUD_CFG="podman-py"
GCLOUD_ZONE="${GCLOUD_ZONE:-us-central1-c}"
GCLOUD_CPUS="2"
GCLOUD_MEMORY="4Gb"
GCLOUD_DISK="200"
EOF
elif [[ "$1" == "--setup" ]]; then
    in_get_ci_vm "$1"
    echo "+ Setting up and Running make" > /dev/stderr
    echo 'PATH=$PATH:$GOPATH/bin' > /etc/ci_environment
    make
else
    # Create and access VM for specified Cirrus-CI task
    mkdir -p $HOME/.config/gcloud/ssh
    podman run -it --rm \
        --tz=local \
        -e NAME="$USER" \
        -e SRCDIR=/src \
        -e GCLOUD_ZONE="$GCLOUD_ZONE" \
        -e DEBUG="${DEBUG:-0}" \
        -v $REPO_DIRPATH:/src:O \
        -v $HOME/.config/gcloud:/root/.config/gcloud:z \
        -v $HOME/.config/gcloud/ssh:/root/.ssh:z \
        quay.io/libpod/get_ci_vm:latest "$@"
fi
@@ -9,8 +9,8 @@ prepare:
    how: install
    package:
      - make
      - podman
      - python3-pip
      - podman

  - name: pip dependencies
    how: shell

@@ -24,39 +24,90 @@ prepare:
      - cp /root/.ssh/authorized_keys /root/.ssh/authorized_keys%
      - cat /root/.ssh/id_ecdsa.pub >>/root/.ssh/authorized_keys

/upstream:
# Run tests against Podman Next builds.
# These tests should NOT overlap with the ones that run in the distro plan and should only include
# tests against upcoming features or upstream tests that we need to run for reasons.
/pnext:
    prepare+:
      - name: enable rhcontainerbot/podman-next update podman
        when: initiator == packit
        how: shell
        script: |
            COPR_REPO_FILE="/etc/yum.repos.d/*podman-next*.repo"
            if compgen -G $COPR_REPO_FILE > /dev/null; then
                sed -i -n '/^priority=/!p;$apriority=1' $COPR_REPO_FILE
            fi
            dnf -y upgrade --allowerasing

    /base_python:
        summary: Run Tests Upstream PRs for base Python
        discover+:
            filter: tag:pnext

    adjust+:
        enabled: false
        when: initiator is not defined or initiator != packit


# Run tests against Podman builds installed from the distribution.
/distro:
    prepare+:
      - name: Enable testing repositories
        when: initiator == packit && distro == fedora
        how: shell
        script: |
            dnf config-manager setopt updates-testing.enabled=true
            dnf -y upgrade --allowerasing --setopt=allow_vendor_change=true

    /sanity:
        summary: Run Sanity and Coverage checks on Python Podman
        discover+:
            # we want to change this to tag:stable once all the coverage tests are fixed
            filter: tag:lint

    /base:
        summary: Run Python Podman Tests on Upstream PRs for base Python
    /base_python:
        summary: Run Tests Upstream for base Python
        discover+:
            filter: tag:base

    /all:
        summary: Run Python Podman Tests on Upstream PRs for all Python versions
    /all_python:
        summary: Run Tests Upstream PRs for all Python versions
        prepare+:
          - name: install all python versions
            how: install
            package:
              - python3.9
              - python3.10
              - python3.11
              - python3.12
              - python3.13
          - name: install all python versions
            how: install
            package:
              - python3.9
              - python3.10
              - python3.11
              - python3.12
              - python3.13
        discover+:
            filter: tag:matrix

    # TODO: replace with /coverage and include integration tests coverage
    /unittest_coverage:
        summary: Run Unit test coverage
        discover+:
            filter: tag:coverage & tag:unittest

    adjust+:
        enabled: false
        when: initiator is not defined or initiator != packit

# Run tests against downstream Podman. These tests should be the all_python only since the sanity
# of code is tested in the distro environment
/downstream:
    /all:
        summary: Run Python Podman Tests on bodhi / errata and dist-git PRs
        summary: Run Tests on bodhi / errata and dist-git PRs
        prepare+:
          - name: install all python versions
            how: install
            package:
              - python3.9
              - python3.10
              - python3.11
              - python3.12
              - python3.13
        discover+:
            filter: tag:matrix
@@ -3,7 +3,7 @@
from podman.api.cached_property import cached_property
from podman.api.client import APIClient
from podman.api.api_versions import VERSION, COMPATIBLE_VERSION
from podman.api.http_utils import prepare_body, prepare_filters
from podman.api.http_utils import encode_auth_header, prepare_body, prepare_filters
from podman.api.parse_utils import (
    decode_header,
    frames,

@@ -27,6 +27,7 @@ __all__ = [
    'cached_property',
    'create_tar',
    'decode_header',
    'encode_auth_header',
    'frames',
    'parse_repository',
    'prepare_body',
@@ -6,5 +6,5 @@ try:
    from functools import cached_property  # pylint: disable=unused-import
except ImportError:

    def cached_property(fn):
    def cached_property(fn):  # type: ignore[no-redef]
        return property(functools.lru_cache()(fn))
@@ -18,7 +18,7 @@ from requests.adapters import HTTPAdapter
from podman.api.api_versions import VERSION, COMPATIBLE_VERSION
from podman.api.ssh import SSHAdapter
from podman.api.uds import UDSAdapter
from podman.errors import APIError, NotFound
from podman.errors import APIError, NotFound, PodmanError
from podman.tlsconfig import TLSConfig
from podman.version import __version__
@@ -85,6 +85,7 @@ def _filter_values(mapping: Mapping[str, Any], recursion=False) -> dict[str, Any]:
                continue

            # depending on type we need details...
            proposal: Any
            if isinstance(value, collections.abc.Mapping):
                proposal = _filter_values(value, recursion=True)
            elif isinstance(value, collections.abc.Iterable) and not isinstance(value, str):

@@ -100,5 +101,5 @@ def _filter_values(mapping: Mapping[str, Any], recursion=False) -> dict[str, Any]:
    return canonical


def encode_auth_header(auth_config: dict[str, str]) -> str:
def encode_auth_header(auth_config: dict[str, str]) -> bytes:
    return base64.urlsafe_b64encode(json.dumps(auth_config).encode('utf-8'))
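An illustrative round-trip for the header helper above (credentials are placeholders):

```
import base64
import json

from podman.api.http_utils import encode_auth_header

token = encode_auth_header({"username": "user", "password": "secret"})
assert isinstance(token, bytes)  # the return type is now bytes, matching the header value
print(json.loads(base64.urlsafe_b64decode(token)))
```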
@@ -4,26 +4,21 @@ import base64
import ipaddress
import json
import struct
from datetime import datetime
from datetime import datetime, timezone
from typing import Any, Optional, Union
from collections.abc import Iterator

from requests import Response
from podman.api.client import APIResponse
from .output_utils import demux_output


def parse_repository(name: str) -> tuple[str, Optional[str]]:
    """Parse repository image name from tag or digest
    """Parse repository image name from tag.

    Returns:
        item 1: repository name
        item 2: Either digest and tag, tag, or None
        item 2: Either tag or None
    """
    # split image name and digest
    elements = name.split("@", 1)
    if len(elements) == 2:
        return elements[0], elements[1]

    # split repository and image name from tag
    # tags need to be split from the right since
    # a port number might increase the split list len by 1

@@ -53,13 +48,15 @@ def prepare_timestamp(value: Union[datetime, int, None]) -> Optional[int]:
        return value

    if isinstance(value, datetime):
        delta = value - datetime.utcfromtimestamp(0)
        if value.tzinfo is None:
            value = value.replace(tzinfo=timezone.utc)
        delta = value - datetime.fromtimestamp(0, timezone.utc)
        return delta.seconds + delta.days * 24 * 3600

    raise ValueError(f"Type '{type(value)}' is not supported by prepare_timestamp()")


def prepare_cidr(value: Union[ipaddress.IPv4Network, ipaddress.IPv6Network]) -> (str, str):
def prepare_cidr(value: Union[ipaddress.IPv4Network, ipaddress.IPv6Network]) -> tuple[str, str]:
    """Returns network address and Base64 encoded netmask from CIDR.

    The return values are dictated by the Go JSON decoder.

@@ -67,7 +64,7 @@ def prepare_cidr(value: Union[ipaddress.IPv4Network, ipaddress.IPv6Network]) ->
    return str(value.network_address), base64.b64encode(value.netmask.packed).decode("utf-8")


def frames(response: Response) -> Iterator[bytes]:
def frames(response: APIResponse) -> Iterator[bytes]:
    """Returns each frame from multiplexed payload, all results are expected in the payload.

    The stdout and stderr frames are undifferentiated as they are returned.

@@ -84,7 +81,7 @@ def frames(response: Response) -> Iterator[bytes]:


def stream_frames(
    response: Response, demux: bool = False
    response: APIResponse, demux: bool = False
) -> Iterator[Union[bytes, tuple[bytes, bytes]]]:
    """Returns each frame from multiplexed streamed payload.

@@ -111,7 +108,7 @@ def stream_frames(


def stream_helper(
    response: Response, decode_to_json: bool = False
    response: APIResponse, decode_to_json: bool = False
) -> Union[Iterator[bytes], Iterator[dict[str, Any]]]:
    """Helper to stream results and optionally decode to json"""
    for value in response.iter_lines():
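A quick sketch of the timezone-aware behavior introduced above (values are illustrative):

```
from datetime import datetime, timezone

from podman.api.parse_utils import prepare_timestamp

# Naive datetimes are now treated as UTC before the epoch offset is computed,
# so both calls below return the same integer.
naive = datetime(2025, 1, 1, 12, 0, 0)
aware = datetime(2025, 1, 1, 12, 0, 0, tzinfo=timezone.utc)
assert prepare_timestamp(naive) == prepare_timestamp(aware)
```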
@@ -24,7 +24,7 @@ def prepare_containerignore(anchor: str) -> list[str]:
    with ignore.open(encoding='utf-8') as file:
        return list(
            filter(
                lambda L: L and not L.startswith("#"),
                lambda i: i and not i.startswith("#"),
                (line.strip() for line in file.readlines()),
            )
        )
@@ -3,9 +3,9 @@
import json
import logging
import shlex
from collections.abc import Iterable, Iterator, Mapping
from contextlib import suppress
from typing import Any, Optional, Union
from collections.abc import Iterable, Iterator, Mapping

import requests
@@ -506,13 +506,228 @@ class Container(PodmanResource):
        response = self.client.post(f"/containers/{self.id}/unpause")
        response.raise_for_status()

    def update(self, **kwargs):
    def update(self, **kwargs) -> None:
        """Update resource configuration of the containers.
        Keyword Args:
            Please refer to Podman API documentation for details:
            https://docs.podman.io/en/latest/_static/api.html#tag/containers/operation/ContainerUpdateLibpod

            restart_policy (str): New restart policy for the container.
            restart_retries (int): New amount of retries for the container's restart policy.
                Only allowed if restartPolicy is set to on-failure

            blkio_weight_device tuple(str, int): Block IO weight (relative device weight)
                in the form: (device_path, weight)
            blockio (dict): LinuxBlockIO for Linux cgroup 'blkio' resource management
                Example:
                    blockio = {
                        "leafWeight": 0,
                        "throttleReadBpsDevice": [{
                            "major": 0,
                            "minor": 0,
                            "rate": 0
                        }],
                        "throttleReadIopsDevice": [{
                            "major": 0,
                            "minor": 0,
                            "rate": 0
                        }],
                        "throttleWriteBpsDevice": [{
                            "major": 0,
                            "minor": 0,
                            "rate": 0
                        }],
                        "throttleWriteIopsDevice": [{
                            "major": 0,
                            "minor": 0,
                            "rate": 0
                        }],
                        "weight": 0,
                        "weightDevice": [{
                            "leafWeight": 0,
                            "major": 0,
                            "minor": 0,
                            "weight": 0
                        }],
                    }
            cpu (dict): LinuxCPU for Linux cgroup 'cpu' resource management
                Example:
                    cpu = {
                        "burst": 0,
                        "cpus": "string",
                        "idle": 0,
                        "mems": "string",
                        "period": 0,
                        "quota": 0,
                        "realtimePeriod": 0,
                        "realtimeRuntime": 0,
                        "shares": 0
                    }
            device_read_bps (list(dict)): Limit read rate (bytes per second) from a device,
                in the form: [{"Path": "string", "Rate": 0}]
            device_read_iops (list(dict)): Limit read rate (IO operations per second) from a device,
                in the form: [{"Path": "string", "Rate": 0}]
            device_write_bps (list(dict)): Limit write rate (bytes per second) to a device,
                in the form: [{"Path": "string", "Rate": 0}]
            device_write_iops (list(dict)): Limit write rate (IO operations per second) to a device,
                in the form: [{"Path": "string", "Rate": 0}]
            devices (list(dict)): Devices configures the device allowlist.
                Example:
                    devices = [{
                        access: "string",
                        allow: 0,
                        major: 0,
                        minor: 0,
                        type: "string"
                    }]
            health_cmd (str): set a healthcheck command for the container ('None' disables the
                existing healthcheck)
            health_interval (str): set an interval for the healthcheck (a value of disable results
                in no automatic timer setup) (Changing this setting resets timer.) (default "30s")
            health_log_destination (str): set the destination of the HealthCheck log. Directory
                path, local or events_logger (local use container state file) (Warning: Changing
                this setting may cause the loss of previous logs.) (default "local")
            health_max_log_count (int): set maximum number of attempts in the HealthCheck log file.
                ('0' value means an infinite number of attempts in the log file) (default 5)
            health_max_logs_size (int): set maximum length in characters of stored HealthCheck log.
                ('0' value means an infinite log length) (default 500)
            health_on_failure (str): action to take once the container turns unhealthy
                (default "none")
            health_retries (int): the number of retries allowed before a healthcheck is considered
                to be unhealthy (default 3)
            health_start_period (str): the initialization time needed for a container to bootstrap
                (default "0s")
            health_startup_cmd (str): Set a startup healthcheck command for the container
            health_startup_interval (str): Set an interval for the startup healthcheck. Changing
                this setting resets the timer, depending on the state of the container.
                (default "30s")
            health_startup_retries (int): Set the maximum number of retries before the startup
                healthcheck will restart the container
            health_startup_success (int): Set the number of consecutive successes before the
                startup healthcheck is marked as successful and the normal healthcheck begins
                (0 indicates any success will start the regular healthcheck)
            health_startup_timeout (str): Set the maximum amount of time that the startup
                healthcheck may take before it is considered failed (default "30s")
            health_timeout (str): the maximum time allowed to complete the healthcheck before an
                interval is considered failed (default "30s")
            no_healthcheck (bool): Disable healthchecks on container
            hugepage_limits (list(dict)): Hugetlb limits (in bytes).
                Default to reservation limits if supported.
                Example:
                    hugepage_limits = [{"limit": 0, "pageSize": "string"}]
            memory (dict): LinuxMemory for Linux cgroup 'memory' resource management
                Example:
                    memory = {
                        "checkBeforeUpdate": True,
                        "disableOOMKiller": True,
                        "kernel": 0,
                        "kernelTCP": 0,
                        "limit": 0,
                        "reservation": 0,
                        "swap": 0,
                        "swappiness": 0,
                        "useHierarchy": True,
                    }
            network (dict): LinuxNetwork identification and priority configuration
                Example:
                    network = {
                        "classID": 0,
                        "priorities": {
                            "name": "string",
                            "priority": 0
                        }
                    }
            pids (dict): LinuxPids for Linux cgroup 'pids' resource management (Linux 4.3)
                Example:
                    pids = {
                        "limit": 0
                    }
            rdma (dict): Rdma resource restriction configuration. Limits are a set of key value
                pairs that define RDMA resource limits, where the key is device name and value
                is resource limits.
                Example:
                    rdma = {
                        "property1": {
                            "hcaHandles": 0,
                            "hcaObjects": 0
                        },
                        "property2": {
                            "hcaHandles": 0,
                            "hcaObjects": 0
                        },
                        ...
                    }
            unified (dict): Unified resources.
                Example:
                    unified = {
                        "property1": "value1",
                        "property2": "value2",
                        ...
                    }

        Raises:
            NotImplementedError: Podman service unsupported operation.
        """
        raise NotImplementedError("Container.update() is not supported by Podman service.")

        data = {}
        params = {}

        health_commands_data = [
            "health_cmd",
            "health_interval",
            "health_log_destination",
            "health_max_log_count",
            "health_max_logs_size",
            "health_on_failure",
            "health_retries",
            "health_start_period",
            "health_startup_cmd",
            "health_startup_interval",
            "health_startup_retries",
            "health_startup_success",
            "health_startup_timeout",
            "health_timeout",
        ]
        # the healthcheck section of parameters accepted can be either no_healthcheck or a series
        # of healthcheck parameters
        if kwargs.get("no_healthcheck"):
            for command in health_commands_data:
                if command in kwargs:
                    raise ValueError(f"Cannot set {command} when no_healthcheck is True")
            data["no_healthcheck"] = kwargs.get("no_healthcheck")
        else:
            for hc in health_commands_data:
                if hc in kwargs:
                    data[hc] = kwargs.get(hc)

        data_mapping = {
            "BlkIOWeightDevice": "blkio_weight_device",
            "blockio": "blockIO",
            "cpu": "cpu",
            "device_read_bps": "DeviceReadBPs",
            "device_read_iops": "DeviceReadIOps",
            "device_write_bps": "DeviceWriteBPs",
            "device_write_iops": "DeviceWriteIOps",
            "devices": "devices",
            "hugepage_limits": "hugepageLimits",
            "memory": "memory",
            "network": "network",
            "pids": "pids",
            "rdma": "rdma",
            "unified": "unified",
        }
        for kwarg_key, data_key in data_mapping.items():
            value = kwargs.get(kwarg_key)
            if value is not None:
                data[data_key] = value

        if kwargs.get("restart_policy"):
            params["restartPolicy"] = kwargs.get("restart_policy")
        if kwargs.get("restart_retries"):
            params["restartRetries"] = kwargs.get("restart_retries")

        response = self.client.post(
            f"/containers/{self.id}/update", params=params, data=json.dumps(data)
        )
        response.raise_for_status()

    def wait(self, **kwargs) -> int:
        """Block until the container enters given state.
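A usage sketch for the Container.update() API added above (container name, limits, and socket path are illustrative):

```
from podman import PodmanClient

with PodmanClient(base_url="unix:///run/podman/podman.sock") as client:
    container = client.containers.get("my-container")  # hypothetical name
    # Tighten the memory limit and switch the restart policy in a single call.
    container.update(
        memory={"limit": 512 * 1024 * 1024, "swap": 1024 * 1024 * 1024},
        restart_policy="on-failure",
        restart_retries=3,
    )
    container.reload()  # refresh attrs to observe the new configuration
```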
@@ -17,7 +17,7 @@ from podman.errors import ImageNotFound

logger = logging.getLogger("podman.containers")

NAMED_VOLUME_PATTERN = re.compile(r'[a-zA-Z0-9][a-zA-Z0-9_.-]*')
NAMED_VOLUME_PATTERN = re.compile(r"[a-zA-Z0-9][a-zA-Z0-9_.-]*")


class CreateMixin:  # pylint: disable=too-few-public-methods

@@ -375,7 +375,9 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
        payload = api.prepare_body(payload)

        response = self.client.post(
            "/containers/create", headers={"content-type": "application/json"}, data=payload
            "/containers/create",
            headers={"content-type": "application/json"},
            data=payload,
        )
        response.raise_for_status(not_found=ImageNotFound)

@@ -383,6 +385,48 @@ class CreateMixin:  # pylint: disable=too-few-public-methods

        return self.get(container_id)

    @staticmethod
    def _convert_env_list_to_dict(env_list):
        """Convert a list of environment variables to a dictionary.

        Args:
            env_list (List[str]): List of environment variables in the format ["KEY=value"]

        Returns:
            Dict[str, str]: Dictionary of environment variables

        Raises:
            ValueError: If any environment variable is not in the correct format
        """
        if not isinstance(env_list, list):
            raise TypeError(f"Expected list, got {type(env_list).__name__}")

        env_dict = {}

        for env_var in env_list:
            if not isinstance(env_var, str):
                raise TypeError(
                    f"Environment variable must be a string, "
                    f"got {type(env_var).__name__}: {repr(env_var)}"
                )

            # Handle empty strings
            if not env_var.strip():
                raise ValueError("Environment variable cannot be empty")
            if "=" not in env_var:
                raise ValueError(
                    f"Environment variable '{env_var}' is not in the correct format. "
                    "Expected format: 'KEY=value'"
                )
            key, value = env_var.split("=", 1)  # Split on first '=' only

            # Validate key is not empty
            if not key.strip():
                raise ValueError(f"Environment variable has empty key: '{env_var}'")

            env_dict[key] = value
        return env_dict

    # pylint: disable=too-many-locals,too-many-statements,too-many-branches
    @staticmethod
    def _render_payload(kwargs: MutableMapping[str, Any]) -> dict[str, Any]:

@@ -410,6 +454,23 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
            with suppress(KeyError):
                del args[key]

        # Handle environment variables
        environment = args.pop("environment", None)
        if environment is not None:
            if isinstance(environment, list):
                try:
                    environment = CreateMixin._convert_env_list_to_dict(environment)
                except ValueError as e:
                    raise ValueError(
                        "Failed to convert environment variables list to dictionary. "
                        f"Error: {str(e)}"
                    ) from e
            elif not isinstance(environment, dict):
                raise TypeError(
                    "Environment variables must be provided as either a dictionary "
                    "or a list of strings in the format ['KEY=value']"
                )

        # These keywords are not supported for various reasons.
        unsupported_keys = set(args.keys()).intersection(
            (

@@ -436,6 +497,13 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
        def pop(k):
            return args.pop(k, None)

        def normalize_nsmode(
            mode: Union[str, MutableMapping[str, str]],
        ) -> dict[str, str]:
            if isinstance(mode, dict):
                return mode
            return {"nsmode": mode}

        def to_bytes(size: Union[int, str, None]) -> Union[int, None]:
            """
            Converts str or int to bytes.

@@ -459,9 +527,9 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
            try:
                return int(size)
            except ValueError as bad_size:
                mapping = {'b': 0, 'k': 1, 'm': 2, 'g': 3}
                mapping_regex = ''.join(mapping.keys())
                search = re.search(rf'^(\d+)([{mapping_regex}])$', size.lower())
                mapping = {"b": 0, "k": 1, "m": 2, "g": 3}
                mapping_regex = "".join(mapping.keys())
                search = re.search(rf"^(\d+)([{mapping_regex}])$", size.lower())
                if search:
                    return int(search.group(1)) * (1024 ** mapping[search.group(2)])
                raise TypeError(

@@ -490,7 +558,7 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
            "dns_search": pop("dns_search"),
            "dns_server": pop("dns"),
            "entrypoint": pop("entrypoint"),
            "env": pop("environment"),
            "env": environment,
            "env_host": pop("env_host"),  # TODO document, podman only
            "expose": {},
            "groups": pop("group_add"),

@@ -600,7 +668,7 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
            if _k in bool_options and v is True:
                options.append(option_name)
            elif _k in regular_options:
                options.append(f'{option_name}={v}')
                options.append(f"{option_name}={v}")
            elif _k in simple_options:
                options.append(v)

@@ -702,12 +770,12 @@ class CreateMixin:  # pylint: disable=too-few-public-methods

        for item in args.pop("volumes", {}).items():
            key, value = item
            extended_mode = value.get('extended_mode', [])
            extended_mode = value.get("extended_mode", [])
            if not isinstance(extended_mode, list):
                raise ValueError("'extended_mode' value should be a list")

            options = extended_mode
            mode = value.get('mode')
            mode = value.get("mode")
            if mode is not None:
                if not isinstance(mode, str):
                    raise ValueError("'mode' value should be a str")

@@ -722,10 +790,10 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
                params["volumes"].append(volume)
            else:
                mount_point = {
                    "destination": value['bind'],
                    "destination": value["bind"],
                    "options": options,
                    "source": key,
                    "type": 'bind',
                    "type": "bind",
                }
                params["mounts"].append(mount_point)

@@ -746,10 +814,10 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
        params["secret_env"] = args.pop("secret_env", {})

        if "cgroupns" in args:
            params["cgroupns"] = {"nsmode": args.pop("cgroupns")}
            params["cgroupns"] = normalize_nsmode(args.pop("cgroupns"))

        if "ipc_mode" in args:
            params["ipcns"] = {"nsmode": args.pop("ipc_mode")}
            params["ipcns"] = normalize_nsmode(args.pop("ipc_mode"))

        if "network_mode" in args:
            network_mode = args.pop("network_mode")

@@ -760,13 +828,13 @@ class CreateMixin:  # pylint: disable=too-few-public-methods
            params["netns"] = {"nsmode": network_mode}

        if "pid_mode" in args:
            params["pidns"] = {"nsmode": args.pop("pid_mode")}
            params["pidns"] = normalize_nsmode(args.pop("pid_mode"))

        if "userns_mode" in args:
            params["userns"] = {"nsmode": args.pop("userns_mode")}
            params["userns"] = normalize_nsmode(args.pop("userns_mode"))

        if "uts_mode" in args:
            params["utsns"] = {"nsmode": args.pop("uts_mode")}
            params["utsns"] = normalize_nsmode(args.pop("uts_mode"))

        if len(args) > 0:
            raise TypeError(
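A short sketch of the new environment handling above (image, names, and socket path are illustrative):

```
from podman import PodmanClient

with PodmanClient(base_url="unix:///run/podman/podman.sock") as client:
    # Both forms are accepted; the list form is converted to a dict internally
    # and malformed entries raise ValueError.
    client.containers.create(
        "quay.io/libpod/alpine:latest",
        name="env-as-list",  # hypothetical name
        environment=["FOO=bar", "EMPTY="],
    )
    client.containers.create(
        "quay.io/libpod/alpine:latest",
        name="env-as-dict",
        environment={"FOO": "bar", "EMPTY": ""},
    )
```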
@@ -2,8 +2,8 @@

import logging
import urllib
from typing import Any, Union
from collections.abc import Mapping
from typing import Any, Union

from podman import api
from podman.domain.containers import Container

@@ -27,12 +27,15 @@ class ContainersManager(RunMixin, CreateMixin, Manager):
        response = self.client.get(f"/containers/{key}/exists")
        return response.ok

    def get(self, key: str) -> Container:
    def get(self, key: str, **kwargs) -> Container:
        """Get container by name or id.

        Args:
            key: Container name or id.

        Keyword Args:
            compatible (bool): Use Docker compatibility endpoint

        Returns:
            A `Container` object corresponding to `key`.

@@ -40,8 +43,10 @@ class ContainersManager(RunMixin, CreateMixin, Manager):
            NotFound: when Container does not exist
            APIError: when an error return by service
        """
        compatible = kwargs.get("compatible", False)

        container_id = urllib.parse.quote_plus(key)
        response = self.client.get(f"/containers/{container_id}/json")
        response = self.client.get(f"/containers/{container_id}/json", compatible=compatible)
        response.raise_for_status()
        return self.prepare_model(attrs=response.json())

@@ -67,12 +72,26 @@ class ContainersManager(RunMixin, CreateMixin, Manager):
                Give the container name or id.
            - since (str): Only containers created after a particular container.
                Give container name or id.
            sparse: Ignored
            sparse: If False, return basic container information without additional
                inspection requests. This improves performance when listing many containers
                but might provide less detail. You can call Container.reload() on individual
                containers later to retrieve complete attributes. Default: True.
                When Docker compatibility is enabled with `compatible=True`: Default: False.
            ignore_removed: If True, ignore failures due to missing containers.

        Raises:
            APIError: when service returns an error
        """
        compatible = kwargs.get("compatible", False)

        # Set sparse default based on mode:
        # Libpod behavior: default is sparse=True (faster, requires reload for full details)
        # Docker behavior: default is sparse=False (full details immediately, compatible)
        if "sparse" in kwargs:
            sparse = kwargs["sparse"]
        else:
            sparse = not compatible  # True for libpod, False for compat

        params = {
            "all": kwargs.get("all"),
            "filters": kwargs.get("filters", {}),

@@ -86,10 +105,21 @@ class ContainersManager(RunMixin, CreateMixin, Manager):
        # filters formatted last because some kwargs may need to be mapped into filters
        params["filters"] = api.prepare_filters(params["filters"])

        response = self.client.get("/containers/json", params=params)
        response = self.client.get("/containers/json", params=params, compatible=compatible)
        response.raise_for_status()

        return [self.prepare_model(attrs=i) for i in response.json()]
        containers: list[Container] = [self.prepare_model(attrs=i) for i in response.json()]

        # If sparse is False, reload each container to get full details
        if not sparse:
            for container in containers:
                try:
                    container.reload(compatible=compatible)
                except APIError:
                    # Skip containers that might have been removed
                    pass

        return containers

    def prune(self, filters: Mapping[str, str] = None) -> dict[str, Any]:
        """Delete stopped containers.

@@ -120,7 +150,7 @@ class ContainersManager(RunMixin, CreateMixin, Manager):
                    explanation=f"""Failed to prune container '{entry["Id"]}'""",
                )

            results["ContainersDeleted"].append(entry["Id"])
            results["ContainersDeleted"].append(entry["Id"])  # type: ignore[attr-defined]
            results["SpaceReclaimed"] += entry["Size"]
        return results
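An illustrative sketch of the sparse/compatible interplay described above (socket path is a placeholder):

```
from podman import PodmanClient

with PodmanClient(base_url="unix:///run/podman/podman.sock") as client:
    # Libpod default: sparse=True, so attrs stay minimal until reload().
    quick = client.containers.list(all=True)

    # Request full details up front; each container is reloaded after listing.
    detailed = client.containers.list(all=True, sparse=False)

    # Docker-compatible endpoint: sparse defaults to False here.
    compat = client.containers.list(all=True, compatible=True)
```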
@@ -44,7 +44,14 @@ class RunMixin:  # pylint: disable=too-few-public-methods
                side. Default: False.

        Keyword Args:
            - See the create() method for keyword arguments.
            - These args are directly used to pull an image when the image is not found.
                auth_config (Mapping[str, str]): Override the credentials that are found in the
                    config for this request. auth_config should contain the username and password
                    keys to be valid.
                platform (str): Platform in the format os[/arch[/variant]]
                policy (str): Pull policy. "missing" (default), "always", "never", "newer"

            - See the create() method for other keyword arguments.

        Returns:
            - When detach is True, return a Container

@@ -66,7 +73,12 @@ class RunMixin:  # pylint: disable=too-few-public-methods
        try:
            container = self.create(image=image, command=command, **kwargs)
        except ImageNotFound:
            self.podman_client.images.pull(image, platform=kwargs.get("platform"))
            self.podman_client.images.pull(
                image,
                auth_config=kwargs.get("auth_config"),
                platform=kwargs.get("platform"),
                policy=kwargs.get("policy", "missing"),
            )
            container = self.create(image=image, command=command, **kwargs)

        container.start()
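The pull-related keywords documented above might be used like this (registry, image, and credentials are placeholders):

```
from podman import PodmanClient

with PodmanClient(base_url="unix:///run/podman/podman.sock") as client:
    output = client.containers.run(
        "registry.example.com/team/app:latest",  # hypothetical private image
        ["echo", "hello"],
        auth_config={"username": "user", "password": "secret"},  # placeholder credentials
        platform="linux/amd64",
        policy="missing",
    )
    print(output)
```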
@@ -1,7 +1,7 @@
"""Model and Manager for Image resources."""

import logging
from typing import Any, Optional, Literal, Union
from typing import Any, Optional, Literal, Union, TYPE_CHECKING
from collections.abc import Iterator

import urllib.parse

@@ -10,12 +10,17 @@ from podman.api import DEFAULT_CHUNK_SIZE
from podman.domain.manager import PodmanResource
from podman.errors import ImageNotFound, InvalidArgument

if TYPE_CHECKING:
    from podman.domain.images_manager import ImagesManager

logger = logging.getLogger("podman.images")


class Image(PodmanResource):
    """Details and configuration for an Image managed by the Podman service."""

    manager: "ImagesManager"

    def __repr__(self) -> str:
        return f"""<{self.__class__.__name__}: '{"', '".join(self.tags)}'>"""
@@ -34,7 +34,7 @@ class BuildMixin:
            nocache (bool) – Don’t use the cache when set to True
            rm (bool) – Remove intermediate containers. Default True
            timeout (int) – HTTP timeout
            custom_context (bool) – Optional if using fileobj (ignored)
            custom_context (bool) – Optional if using fileobj
            encoding (str) – The encoding for a stream. Set to gzip for compressing (ignored)
            pull (bool) – Downloads any updates to the FROM image in Dockerfile
            forcerm (bool) – Always remove intermediate containers, even after unsuccessful builds

@@ -82,7 +82,23 @@ class BuildMixin:

        body = None
        path = None
        if "fileobj" in kwargs:
        if kwargs.get("custom_context"):
            if "fileobj" not in kwargs:
                raise PodmanError(
                    "Custom context requires fileobj to be set to a binary file-like object "
                    "containing a build-directory tarball."
                )
            if "dockerfile" not in kwargs:
                # TODO: Scan the tarball for either a Dockerfile or a Containerfile.
                # This could be slow if the tarball is large,
                # and could require buffering/copying the tarball if `fileobj` is not seekable.
                # As a workaround for now, don't support omitting the filename.
                raise PodmanError(
                    "Custom context requires specifying the name of the Dockerfile "
                    "(typically 'Dockerfile' or 'Containerfile')."
                )
            body = kwargs["fileobj"]
        elif "fileobj" in kwargs:
            path = tempfile.TemporaryDirectory()  # pylint: disable=consider-using-with
            filename = pathlib.Path(path.name) / params["dockerfile"]
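A sketch of building from a pre-made context tarball under the custom_context rules added above (directory, tag, and socket path are illustrative):

```
import io
import tarfile

from podman import PodmanClient

# Pack a build directory into an in-memory tarball (hypothetical layout).
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:
    tar.add("./app", arcname=".")
buf.seek(0)

with PodmanClient(base_url="unix:///run/podman/podman.sock") as client:
    image, logs = client.images.build(
        fileobj=buf,
        custom_context=True,
        dockerfile="Containerfile",  # must be named explicitly when custom_context is set
        tag="localhost/app:dev",
    )
```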
@@ -13,7 +13,6 @@ import requests

from podman import api
from podman.api.parse_utils import parse_repository
from podman.api.http_utils import encode_auth_header
from podman.domain.images import Image
from podman.domain.images_build import BuildMixin
from podman.domain.json_stream import json_stream

@@ -123,7 +122,7 @@ class ImagesManager(BuildMixin, Manager):

    def load(
        self, data: Optional[bytes] = None, file_path: Optional[os.PathLike] = None
    ) -> Generator[bytes, None, None]:
    ) -> Generator[Image, None, None]:
        """Restore an image previously saved.

        Args:

@@ -159,7 +158,7 @@ class ImagesManager(BuildMixin, Manager):
        )
        response.raise_for_status()  # Catch any errors before proceeding

        def _generator(body: dict) -> Generator[bytes, None, None]:
        def _generator(body: dict) -> Generator[Image, None, None]:
            # Iterate and yield images from response body
            for item in body["Names"]:
                yield self.get(item)

@@ -264,7 +263,7 @@ class ImagesManager(BuildMixin, Manager):

        headers = {
            # A base64url-encoded auth configuration
            "X-Registry-Auth": encode_auth_header(auth_config) if auth_config else ""
            "X-Registry-Auth": api.encode_auth_header(auth_config) if auth_config else ""
        }

        params = {

@@ -273,9 +272,14 @@ class ImagesManager(BuildMixin, Manager):
            "format": kwargs.get("format"),
        }

        stream = kwargs.get("stream", False)
        decode = kwargs.get("decode", False)

        name = f'{repository}:{tag}' if tag else repository
        name = urllib.parse.quote_plus(name)
        response = self.client.post(f"/images/{name}/push", params=params, headers=headers)
        response = self.client.post(
            f"/images/{name}/push", params=params, stream=stream, headers=headers
        )
        response.raise_for_status(not_found=ImageNotFound)

        tag_count = 0 if tag is None else 1

@@ -290,8 +294,6 @@ class ImagesManager(BuildMixin, Manager):
            },
        ]

        stream = kwargs.get("stream", False)
        decode = kwargs.get("decode", False)
        if stream:
            return self._push_helper(decode, body)

@@ -335,6 +337,7 @@ class ImagesManager(BuildMixin, Manager):
            decode (bool) – Decode the JSON data from the server into dicts.
                Only applies with ``stream=True``
            platform (str) – Platform in the format os[/arch[/variant]]
            policy (str) - Pull policy. "always" (default), "missing", "never", "newer"
            progress_bar (bool) - Display a progress bar with the image pull progress (uses
                the compat endpoint). Default: False
            tls_verify (bool) - Require TLS verification. Default: True.

@@ -359,10 +362,11 @@ class ImagesManager(BuildMixin, Manager):

        headers = {
            # A base64url-encoded auth configuration
            "X-Registry-Auth": encode_auth_header(auth_config) if auth_config else ""
            "X-Registry-Auth": api.encode_auth_header(auth_config) if auth_config else ""
        }

        params = {
            "policy": kwargs.get("policy", "always"),
            "reference": repository,
            "tlsVerify": kwargs.get("tls_verify", True),
            "compatMode": kwargs.get("compatMode", True),
|
|||
|
||||
from abc import ABC, abstractmethod
|
||||
from collections import abc
|
||||
from typing import Any, Optional, TypeVar, Union
|
||||
from typing import Any, Optional, TypeVar, Union, TYPE_CHECKING
|
||||
from collections.abc import Mapping
|
||||
|
||||
from podman.api.client import APIClient
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from podman import PodmanClient
|
||||
|
||||
# Methods use this Type when a subclass of PodmanResource is expected.
|
||||
PodmanResourceType: TypeVar = TypeVar("PodmanResourceType", bound="PodmanResource")
|
||||
|
||||
|
@ -67,9 +70,13 @@ class PodmanResource(ABC): # noqa: B024
|
|||
return self.id[:17]
|
||||
return self.id[:10]
|
||||
|
||||
def reload(self) -> None:
|
||||
"""Refresh this object's data from the service."""
|
||||
latest = self.manager.get(self.id)
|
||||
def reload(self, **kwargs) -> None:
|
||||
"""Refresh this object's data from the service.
|
||||
|
||||
Keyword Args:
|
||||
compatible (bool): Use Docker compatibility endpoint
|
||||
"""
|
||||
latest = self.manager.get(self.id, **kwargs)
|
||||
self.attrs = latest.attrs
|
||||
|
||||
|
||||
|
|
|
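Sketch of the extended reload() keyword, assuming a container object already fetched through the client:

    container = client.containers.get("my-container")
    container.reload()                 # default libpod endpoint, unchanged behavior
    container.reload(compatible=True)  # forwarded to manager.get(), uses the Docker-compatible endpoint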
@@ -97,6 +97,7 @@ class Manifest(PodmanResource):
        self,
        destination: str,
        all: Optional[bool] = None,  # pylint: disable=redefined-builtin
        **kwargs,
    ) -> None:
        """Push a manifest list or image index to a registry.

@@ -104,15 +105,32 @@ class Manifest(PodmanResource):
            destination: Target for push.
            all: Push all images.

        Keyword Args:
            auth_config (Mapping[str, str]): Override configured credentials. Must include
                username and password keys.

        Raises:
            NotFound: when the Manifest could not be found
            APIError: when service reports an error
        """
        auth_config: Optional[dict[str, str]] = kwargs.get("auth_config")

        headers = {
            # A base64url-encoded auth configuration
            "X-Registry-Auth": api.encode_auth_header(auth_config) if auth_config else ""
        }

        params = {
            "all": all,
            "destination": destination,
        }
        response = self.client.post(f"/manifests/{self.quoted_name}/push", params=params)

        destination_quoted = urllib.parse.quote_plus(destination)
        response = self.client.post(
            f"/manifests/{self.quoted_name}/registry/{destination_quoted}",
            params=params,
            headers=headers,
        )
        response.raise_for_status()

    def remove(self, digest: str) -> None:
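Hedged usage sketch of the new Manifest.push() keyword, mirroring the unit test added later in this compare:

    manifest = client.manifests.get("quay.io/fedora:latest")
    manifest.push(
        destination="quay.io/fedora:v1",
        auth_config={"username": "user", "password": "pass"},  # sent as the X-Registry-Auth header
    )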
@@ -12,7 +12,7 @@ Example:
import ipaddress
import logging
from contextlib import suppress
from typing import Any, Optional, Literal
from typing import Any, Optional, Literal, Union

from podman.api import http_utils, prepare_filters
from podman.domain.manager import Manager

@@ -188,7 +188,7 @@ class NetworksManager(Manager):
        return {"NetworksDeleted": deleted, "SpaceReclaimed": 0}

    def remove(self, name: [Network, str], force: Optional[bool] = None) -> None:
    def remove(self, name: Union[Network, str], force: Optional[bool] = None) -> None:
        """Remove Network resource.

        Args:
@@ -1,11 +1,14 @@
"""Model and Manager for Pod resources."""

import logging
from typing import Any, Optional, Union
from typing import Any, Optional, Union, TYPE_CHECKING

from podman.domain.manager import PodmanResource

_Timeout = Union[None, float, tuple[float, float], tuple[float, None]]
if TYPE_CHECKING:
    from podman.domain.pods_manager import PodsManager

_Timeout = Union[None, int, tuple[int, int], tuple[int, None]]

logger = logging.getLogger("podman.pods")

@@ -13,6 +16,8 @@ logger = logging.getLogger("podman.pods")
class Pod(PodmanResource):
    """Details and configuration for a pod managed by the Podman service."""

    manager: "PodsManager"

    @property
    def id(self):  # pylint: disable=invalid-name
        return self.attrs.get("ID", self.attrs.get("Id"))
@@ -40,7 +40,7 @@ class RegistryData(PodmanResource):
        Args:
            platform: Platform for which to pull Image. Default: None (all platforms.)
        """
        repository = api.parse_repository(self.image_name)
        repository, _ = api.parse_repository(self.image_name)
        return self.manager.pull(repository, tag=self.id, platform=platform)

    def has_platform(self, platform: Union[str, Mapping[str, Any]]) -> bool:
@@ -56,9 +56,9 @@ class SystemManager:
            password: Registry plaintext password
            email: Registry account email address
            registry: URL for registry access. For example,
                https://quay.io/v2
            reauth: Ignored: If True, refresh existing authentication. Default: False
            dockercfg_path: Ignored: Path to custom configuration file.
                https://quay.io/v2
            auth: TODO: Add description based on the source code of Podman.
            identitytoken: IdentityToken is used to authenticate the user and
                get an access token for the registry.
@@ -35,6 +35,23 @@ class Volume(PodmanResource):
        """
        self.manager.remove(self.name, force=force)

    def inspect(self, **kwargs) -> dict:
        """Inspect this volume

        Keyword Args:
            tls_verify (bool) - Require TLS verification. Default: True.

        Returns:
            Display attributes of volume.

        Raises:
            APIError: when service reports an error
        """
        params = {"tlsVerify": kwargs.get("tls_verify", True)}
        response = self.client.get(f"/volumes/{self.id}/json", params=params)
        response.raise_for_status()
        return response.json()


class VolumesManager(Manager):
    """Specialized Manager for Volume resources."""
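Hedged usage sketch of the new Volume.inspect() method, following the unit test added further down:

    volume = client.volumes.get("dbase")
    details = volume.inspect(tls_verify=False)
    print(details["Mountpoint"])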
@@ -58,7 +58,7 @@ try:
    from .exceptions import ImageNotFound
except ImportError:

    class ImageNotFound(NotFoundError):
    class ImageNotFound(NotFoundError):  # type: ignore[no-redef]
        """HTTP request returned a http.HTTPStatus.NOT_FOUND.

        Specialized for Image not found. Deprecated.
@@ -3,5 +3,5 @@
# Do not auto-update these from version.py,
# as test code should be changed to reflect changes in Podman API versions
BASE_SOCK = "unix:///run/api.sock"
LIBPOD_URL = "http://%2Frun%2Fapi.sock/v5.4.0/libpod"
LIBPOD_URL = "http://%2Frun%2Fapi.sock/v5.6.0/libpod"
COMPATIBLE_URL = "http://%2Frun%2Fapi.sock/v1.40"
@@ -0,0 +1,21 @@
import pytest


def pytest_addoption(parser):
    parser.addoption(
        "--pnext", action="store_true", default=False, help="run tests against podman_next copr"
    )


def pytest_configure(config):
    config.addinivalue_line("markers", "pnext: mark test as run against podman_next")


def pytest_collection_modifyitems(config, items):
    if config.getoption("--pnext"):
        # --pnext given in cli: run tests marked as pnext
        return
    podman_next = pytest.mark.skip(reason="need --pnext option to run")
    for item in items:
        if "pnext" in item.keywords:
            item.add_marker(podman_next)
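How the new conftest hook is meant to be used, as implied by the code above (the test name here is hypothetical):

    import pytest

    @pytest.mark.pnext
    def test_only_on_podman_next():
        ...

    # skipped by default; collected when the suite is invoked as:  pytest --pnext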
@@ -1,9 +1,11 @@
import unittest

import re
import os
import pytest

import podman.tests.integration.base as base
from podman import PodmanClient
from podman.tests.utils import PODMAN_VERSION

# @unittest.skipIf(os.geteuid() != 0, 'Skipping, not running as root')

@@ -20,7 +22,7 @@ class ContainersIntegrationTest(base.IntegrationTest):
        self.alpine_image = self.client.images.pull("quay.io/libpod/alpine", tag="latest")
        self.containers = []

    def tearUp(self):
    def tearDown(self):
        for container in self.containers:
            container.remove(force=True)

@@ -102,6 +104,44 @@ class ContainersIntegrationTest(base.IntegrationTest):
        for hosts_entry in formatted_hosts:
            self.assertIn(hosts_entry, logs)

    def test_container_environment_variables(self):
        """Test environment variables passed to the container."""
        with self.subTest("Check environment variables as dictionary"):
            env_dict = {"MY_VAR": "123", "ANOTHER_VAR": "456"}
            container = self.client.containers.create(
                self.alpine_image, command=["env"], environment=env_dict
            )
            self.containers.append(container)

            container_env = container.attrs.get('Config', {}).get('Env', [])
            for key, value in env_dict.items():
                self.assertIn(f"{key}={value}", container_env)

            container.start()
            container.wait()
            logs = b"\n".join(container.logs()).decode()

            for key, value in env_dict.items():
                self.assertIn(f"{key}={value}", logs)

        with self.subTest("Check environment variables as list"):
            env_list = ["MY_VAR=123", "ANOTHER_VAR=456"]
            container = self.client.containers.create(
                self.alpine_image, command=["env"], environment=env_list
            )
            self.containers.append(container)

            container_env = container.attrs.get('Config', {}).get('Env', [])
            for env in env_list:
                self.assertIn(env, container_env)

            container.start()
            container.wait()
            logs = b"\n".join(container.logs()).decode()

            for env in env_list:
                self.assertIn(env, logs)

    def _test_memory_limit(self, parameter_name, host_config_name, set_mem_limit=False):
        """Base for tests which checks memory limits"""
        memory_limit_tests = [

@@ -239,6 +279,11 @@ class ContainersIntegrationTest(base.IntegrationTest):
        """Test passing shared memory size"""
        self._test_memory_limit('shm_size', 'ShmSize')

    @pytest.mark.skipif(os.geteuid() != 0, reason='Skipping, not running as root')
    @pytest.mark.skipif(
        PODMAN_VERSION >= (5, 6, 0),
        reason="Test against this feature in Podman 5.6.0 or greater https://github.com/containers/podman/pull/25942",
    )
    def test_container_mounts(self):
        """Test passing mounts"""
        with self.subTest("Check bind mount"):

@@ -311,6 +356,47 @@ class ContainersIntegrationTest(base.IntegrationTest):
        self.assertEqual(container.attrs.get('State', dict()).get('ExitCode', 256), 0)

    @pytest.mark.skipif(os.geteuid() != 0, reason='Skipping, not running as root')
    @pytest.mark.skipif(
        PODMAN_VERSION < (5, 6, 0),
        reason="Test against this feature before Podman 5.6.0 https://github.com/containers/podman/pull/25942",
    )
    def test_container_mounts_without_rw_as_default(self):
        """Test passing mounts"""
        with self.subTest("Check bind mount"):
            mount = {
                "type": "bind",
                "source": "/etc/hosts",
                "target": "/test",
                "read_only": True,
                "relabel": "Z",
            }
            container = self.client.containers.create(
                self.alpine_image, command=["cat", "/test"], mounts=[mount]
            )
            self.containers.append(container)
            self.assertIn(
                f"{mount['source']}:{mount['target']}:ro,Z,rprivate,rbind",
                container.attrs.get('HostConfig', {}).get('Binds', list()),
            )

            # check if container can be started and exits with EC == 0
            container.start()
            container.wait()

            self.assertEqual(container.attrs.get('State', dict()).get('ExitCode', 256), 0)

        with self.subTest("Check tmpfs mount"):
            mount = {"type": "tmpfs", "source": "tmpfs", "target": "/test", "size": "456k"}
            container = self.client.containers.create(
                self.alpine_image, command=["df", "-h"], mounts=[mount]
            )
            self.containers.append(container)
            self.assertEqual(
                container.attrs.get('HostConfig', {}).get('Tmpfs', {}).get(mount['target']),
                f"size={mount['size']},rprivate,nosuid,nodev,tmpcopyup",
            )

    def test_container_devices(self):
        devices = ["/dev/null:/dev/foo", "/dev/zero:/dev/bar"]
        container = self.client.containers.create(
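A short sketch of the behavior exercised by the integration test above: environment variables may be passed to containers.create() either as a dict or as a list of KEY=VALUE strings (image name illustrative):

    container = client.containers.create(
        "quay.io/libpod/alpine",
        command=["env"],
        environment={"MY_VAR": "123"},            # dict form
    )
    container = client.containers.create(
        "quay.io/libpod/alpine",
        command=["env"],
        environment=["MY_VAR=123", "OTHER=456"],  # list form, converted internally
    )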
@@ -1,8 +1,8 @@
import io
import random
import tarfile
import unittest
import tempfile
import unittest

try:
    # Python >= 3.10

@@ -17,7 +17,6 @@ from podman.domain.containers import Container
from podman.domain.images import Image
from podman.errors import NotFound


# @unittest.skipIf(os.geteuid() != 0, 'Skipping, not running as root')

@@ -238,6 +237,38 @@ ENV foo=bar
        labeled_container.remove(v=True)
        unlabeled_container.remove(v=True)

    def test_container_update(self):
        """Update container"""
        to_update_container = self.client.containers.run(
            self.alpine_image, name="to_update_container", detach=True
        )
        with self.subTest("Test container update changing the healthcheck"):
            to_update_container.update(health_cmd="ls")
            self.assertEqual(
                to_update_container.inspect()['Config']['Healthcheck']['Test'], ['CMD-SHELL', 'ls']
            )

        with self.subTest("Test container update disabling the healthcheck"):
            to_update_container.update(no_healthcheck=True)
            self.assertEqual(
                to_update_container.inspect()['Config']['Healthcheck']['Test'], ['NONE']
            )
        with self.subTest("Test container update passing payload and data"):
            to_update_container.update(
                restart_policy="always", health_cmd="echo", health_timeout="10s"
            )
            self.assertEqual(
                to_update_container.inspect()['Config']['Healthcheck']['Test'],
                ['CMD-SHELL', 'echo'],
            )
            self.assertEqual(
                to_update_container.inspect()['Config']['Healthcheck']['Timeout'], 10000000000
            )
            self.assertEqual(
                to_update_container.inspect()['HostConfig']['RestartPolicy']['Name'], 'always'
            )
        to_update_container.remove(v=True)


if __name__ == '__main__':
    unittest.main()
@@ -15,6 +15,7 @@
"""Images integration tests."""

import io
import platform
import tarfile
import types
import unittest

@@ -22,7 +23,7 @@ import unittest
import podman.tests.integration.base as base
from podman import PodmanClient
from podman.domain.images import Image
from podman.errors import APIError, ImageNotFound
from podman.errors import APIError, ImageNotFound, PodmanError


# @unittest.skipIf(os.geteuid() != 0, 'Skipping, not running as root')

@@ -143,10 +144,48 @@ class ImagesIntegrationTest(base.IntegrationTest):
        self.assertIsNotNone(image)
        self.assertIsNotNone(image.id)

    def test_build_with_context(self):
        context = io.BytesIO()
        with tarfile.open(fileobj=context, mode="w") as tar:

            def add_file(name: str, content: str):
                binary_content = content.encode("utf-8")
                fileobj = io.BytesIO(binary_content)
                tarinfo = tarfile.TarInfo(name=name)
                tarinfo.size = len(binary_content)
                tar.addfile(tarinfo, fileobj)

            # Use a non-standard Dockerfile name to test the 'dockerfile' argument
            add_file(
                "MyDockerfile", ("FROM quay.io/libpod/alpine_labels:latest\nCOPY example.txt .\n")
            )
            add_file("example.txt", "This is an example file.\n")

        # Rewind to the start of the generated file so we can read it
        context.seek(0)

        with self.assertRaises(PodmanError):
            # If requesting a custom context, must provide the context as `fileobj`
            self.client.images.build(custom_context=True, path='invalid')

        with self.assertRaises(PodmanError):
            # If requesting a custom context, currently must specify the dockerfile name
            self.client.images.build(custom_context=True, fileobj=context)

        image, stream = self.client.images.build(
            fileobj=context,
            dockerfile="MyDockerfile",
            custom_context=True,
        )
        self.assertIsNotNone(image)
        self.assertIsNotNone(image.id)

    @unittest.skipIf(platform.architecture()[0] == "32bit", "no 32-bit image available")
    def test_pull_stream(self):
        generator = self.client.images.pull("ubi8", tag="latest", stream=True)
        self.assertIsInstance(generator, types.GeneratorType)

    @unittest.skipIf(platform.architecture()[0] == "32bit", "no 32-bit image available")
    def test_pull_stream_decode(self):
        generator = self.client.images.pull("ubi8", tag="latest", stream=True, decode=True)
        self.assertIsInstance(generator, types.GeneratorType)
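A minimal sketch of the custom build context usage exercised above, outside of the test harness (Dockerfile name and content illustrative):

    import io
    import tarfile

    context = io.BytesIO()
    with tarfile.open(fileobj=context, mode="w") as tar:
        data = b"FROM quay.io/libpod/alpine:latest\n"
        info = tarfile.TarInfo("MyDockerfile")
        info.size = len(data)
        tar.addfile(info, io.BytesIO(data))
    context.seek(0)

    image, logs = client.images.build(
        fileobj=context,
        custom_context=True,
        dockerfile="MyDockerfile",   # required when custom_context=True
    )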
@@ -50,7 +50,7 @@ class PodmanLauncher:
        self.socket_file: str = socket_uri.replace('unix://', '')
        self.log_level = log_level

        self.proc = None
        self.proc: Optional[subprocess.Popen[bytes]] = None
        self.reference_id = hash(time.monotonic())

        self.cmd: list[str] = []
@@ -11,7 +11,7 @@ from podman import api


class TestUtilsCase(unittest.TestCase):
    def test_format_filters(self):
    def test_format_filters(self) -> None:
        @dataclass
        class TestCase:
            name: str

@@ -42,12 +42,12 @@ class TestUtilsCase(unittest.TestCase):
            if actual is not None:
                self.assertIsInstance(actual, str)

    def test_containerignore_404(self):
    def test_containerignore_404(self) -> None:
        actual = api.prepare_containerignore("/does/not/exists")
        self.assertListEqual([], actual)

    @patch.object(pathlib.Path, "exists", return_value=True)
    def test_containerignore_read(self, patch_exists):
    def test_containerignore_read(self, patch_exists) -> None:
        data = r"""# unittest

        #Ignore the logs directory

@@ -74,7 +74,7 @@ class TestUtilsCase(unittest.TestCase):
        patch_exists.assert_called_once_with()

    @patch.object(pathlib.Path, "exists", return_value=True)
    def test_containerignore_empty(self, patch_exists):
    def test_containerignore_empty(self, patch_exists) -> None:
        data = r"""# unittest
        """

@@ -86,21 +86,21 @@ class TestUtilsCase(unittest.TestCase):
        patch_exists.assert_called_once_with()

    @mock.patch("pathlib.Path.parent", autospec=True)
    def test_containerfile_1(self, mock_parent):
    def test_containerfile_1(self, mock_parent) -> None:
        mock_parent.samefile.return_value = True
        actual = api.prepare_containerfile("/work", "/work/Dockerfile")
        self.assertEqual(actual, "Dockerfile")
        mock_parent.samefile.assert_called()

    @mock.patch("pathlib.Path.parent", autospec=True)
    def test_containerfile_2(self, mock_parent):
    def test_containerfile_2(self, mock_parent) -> None:
        mock_parent.samefile.return_value = True
        actual = api.prepare_containerfile(".", "Dockerfile")
        self.assertEqual(actual, "Dockerfile")
        mock_parent.samefile.assert_called()

    @mock.patch("shutil.copy2")
    def test_containerfile_copy(self, mock_copy):
    def test_containerfile_copy(self, mock_copy) -> None:
        mock_copy.return_value = None

        with mock.patch.object(pathlib.Path, "parent") as mock_parent:

@@ -109,7 +109,7 @@ class TestUtilsCase(unittest.TestCase):
            actual = api.prepare_containerfile("/work", "/home/Dockerfile")
            self.assertRegex(actual, r"\.containerfile\..*")

    def test_prepare_body_all_types(self):
    def test_prepare_body_all_types(self) -> None:
        payload = {
            "String": "string",
            "Integer": 42,

@@ -121,7 +121,7 @@ class TestUtilsCase(unittest.TestCase):
        actual = api.prepare_body(payload)
        self.assertEqual(actual, json.dumps(payload, sort_keys=True))

    def test_prepare_body_none(self):
    def test_prepare_body_none(self) -> None:
        payload = {
            "String": "",
            "Integer": None,

@@ -133,8 +133,8 @@ class TestUtilsCase(unittest.TestCase):
        actual = api.prepare_body(payload)
        self.assertEqual(actual, '{"Boolean": false}')

    def test_prepare_body_embedded(self):
        payload = {
    def test_prepare_body_embedded(self) -> None:
        payload: dict[str, Any] = {
            "String": "",
            "Integer": None,
            "Boolean": False,

@@ -154,7 +154,7 @@ class TestUtilsCase(unittest.TestCase):
        self.assertDictEqual(actual_dict["Dictionary"], payload["Dictionary"])
        self.assertEqual(set(actual_dict["Set1"]), {"item1", "item2"})

    def test_prepare_body_dict_empty_string(self):
    def test_prepare_body_dict_empty_string(self) -> None:
        payload = {"Dictionary": {"key1": "", "key2": {"key3": ""}, "key4": [], "key5": {}}}

        actual = api.prepare_body(payload)

@@ -164,6 +164,15 @@ class TestUtilsCase(unittest.TestCase):
        self.assertDictEqual(payload, actual_dict)

    def test_encode_auth_header(self):
        auth_config = {
            "username": "user",
            "password": "pass",
        }
        expected = b"eyJ1c2VybmFtZSI6ICJ1c2VyIiwgInBhc3N3b3JkIjogInBhc3MifQ=="
        actual = api.encode_auth_header(auth_config)
        self.assertEqual(expected, actual)


if __name__ == '__main__':
    unittest.main()
@@ -8,12 +8,13 @@ except ImportError:
    # Python < 3.10
    from collections.abc import Iterator

from unittest.mock import DEFAULT, patch, MagicMock
from unittest.mock import DEFAULT, MagicMock, patch

import requests_mock

from podman import PodmanClient, tests
from podman.domain.containers import Container
from podman.domain.containers_create import CreateMixin
from podman.domain.containers_manager import ContainersManager
from podman.errors import ImageNotFound, NotFound

@@ -64,7 +65,8 @@ class ContainersManagerTestCase(unittest.TestCase):
            "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd"
        )
        self.assertEqual(
            actual.id, "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd"
            actual.id,
            "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd",
        )

    @requests_mock.Mocker()

@@ -104,10 +106,12 @@ class ContainersManagerTestCase(unittest.TestCase):
        self.assertIsInstance(actual, list)

        self.assertEqual(
            actual[0].id, "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd"
            actual[0].id,
            "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd",
        )
        self.assertEqual(
            actual[1].id, "6dc84cc0a46747da94e4c1571efcc01a756b4017261440b4b8985d37203c3c03"
            actual[1].id,
            "6dc84cc0a46747da94e4c1571efcc01a756b4017261440b4b8985d37203c3c03",
        )

    @requests_mock.Mocker()

@@ -132,10 +136,12 @@ class ContainersManagerTestCase(unittest.TestCase):
        self.assertIsInstance(actual, list)

        self.assertEqual(
            actual[0].id, "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd"
            actual[0].id,
            "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd",
        )
        self.assertEqual(
            actual[1].id, "6dc84cc0a46747da94e4c1571efcc01a756b4017261440b4b8985d37203c3c03"
            actual[1].id,
            "6dc84cc0a46747da94e4c1571efcc01a756b4017261440b4b8985d37203c3c03",
        )

    @requests_mock.Mocker()

@@ -147,6 +153,24 @@ class ContainersManagerTestCase(unittest.TestCase):
        actual = self.client.containers.list()
        self.assertIsInstance(actual, list)

        self.assertEqual(
            actual[0].id,
            "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd",
        )
        self.assertEqual(
            actual[1].id,
            "6dc84cc0a46747da94e4c1571efcc01a756b4017261440b4b8985d37203c3c03",
        )

    @requests_mock.Mocker()
    def test_list_sparse_libpod_default(self, mock):
        mock.get(
            tests.LIBPOD_URL + "/containers/json",
            json=[FIRST_CONTAINER, SECOND_CONTAINER],
        )
        actual = self.client.containers.list()
        self.assertIsInstance(actual, list)

        self.assertEqual(
            actual[0].id, "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd"
        )

@@ -154,6 +178,118 @@ class ContainersManagerTestCase(unittest.TestCase):
            actual[1].id, "6dc84cc0a46747da94e4c1571efcc01a756b4017261440b4b8985d37203c3c03"
        )

        # Verify that no individual reload() calls were made for sparse=True (default)
        # Should be only 1 request for the list endpoint
        self.assertEqual(len(mock.request_history), 1)
        # lower() needs to be enforced since the mocked url is transformed as lowercase and
        # this avoids %2f != %2F errors. Same applies for other instances of assertEqual
        self.assertEqual(mock.request_history[0].url, tests.LIBPOD_URL.lower() + "/containers/json")

    @requests_mock.Mocker()
    def test_list_sparse_libpod_false(self, mock):
        mock.get(
            tests.LIBPOD_URL + "/containers/json",
            json=[FIRST_CONTAINER, SECOND_CONTAINER],
        )
        # Mock individual container detail endpoints for reload() calls
        # that are done for sparse=False
        mock.get(
            tests.LIBPOD_URL + f"/containers/{FIRST_CONTAINER['Id']}/json",
            json=FIRST_CONTAINER,
        )
        mock.get(
            tests.LIBPOD_URL + f"/containers/{SECOND_CONTAINER['Id']}/json",
            json=SECOND_CONTAINER,
        )
        actual = self.client.containers.list(sparse=False)
        self.assertIsInstance(actual, list)

        self.assertEqual(
            actual[0].id, "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd"
        )
        self.assertEqual(
            actual[1].id, "6dc84cc0a46747da94e4c1571efcc01a756b4017261440b4b8985d37203c3c03"
        )

        # Verify that individual reload() calls were made for sparse=False
        # Should be 3 requests total: 1 for list + 2 for individual container details
        self.assertEqual(len(mock.request_history), 3)

        # Verify the list endpoint was called first
        self.assertEqual(mock.request_history[0].url, tests.LIBPOD_URL.lower() + "/containers/json")

        # Verify the individual container detail endpoints were called
        individual_urls = {req.url for req in mock.request_history[1:]}
        expected_urls = {
            tests.LIBPOD_URL.lower() + f"/containers/{FIRST_CONTAINER['Id']}/json",
            tests.LIBPOD_URL.lower() + f"/containers/{SECOND_CONTAINER['Id']}/json",
        }
        self.assertEqual(individual_urls, expected_urls)

    @requests_mock.Mocker()
    def test_list_sparse_compat_default(self, mock):
        mock.get(
            tests.COMPATIBLE_URL + "/containers/json",
            json=[FIRST_CONTAINER, SECOND_CONTAINER],
        )
        # Mock individual container detail endpoints for reload() calls
        # that are done for sparse=False
        mock.get(
            tests.COMPATIBLE_URL + f"/containers/{FIRST_CONTAINER['Id']}/json",
            json=FIRST_CONTAINER,
        )
        mock.get(
            tests.COMPATIBLE_URL + f"/containers/{SECOND_CONTAINER['Id']}/json",
            json=SECOND_CONTAINER,
        )
        actual = self.client.containers.list(compatible=True)
        self.assertIsInstance(actual, list)

        self.assertEqual(
            actual[0].id, "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd"
        )
        self.assertEqual(
            actual[1].id, "6dc84cc0a46747da94e4c1571efcc01a756b4017261440b4b8985d37203c3c03"
        )

        # Verify that individual reload() calls were made for compat default (sparse=True)
        # Should be 3 requests total: 1 for list + 2 for individual container details
        self.assertEqual(len(mock.request_history), 3)
        self.assertEqual(
            mock.request_history[0].url, tests.COMPATIBLE_URL.lower() + "/containers/json"
        )

        # Verify the individual container detail endpoints were called
        individual_urls = {req.url for req in mock.request_history[1:]}
        expected_urls = {
            tests.COMPATIBLE_URL.lower() + f"/containers/{FIRST_CONTAINER['Id']}/json",
            tests.COMPATIBLE_URL.lower() + f"/containers/{SECOND_CONTAINER['Id']}/json",
        }
        self.assertEqual(individual_urls, expected_urls)

    @requests_mock.Mocker()
    def test_list_sparse_compat_true(self, mock):
        mock.get(
            tests.COMPATIBLE_URL + "/containers/json",
            json=[FIRST_CONTAINER, SECOND_CONTAINER],
        )
        actual = self.client.containers.list(sparse=True, compatible=True)
        self.assertIsInstance(actual, list)

        self.assertEqual(
            actual[0].id, "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd"
        )
        self.assertEqual(
            actual[1].id, "6dc84cc0a46747da94e4c1571efcc01a756b4017261440b4b8985d37203c3c03"
        )

        # Verify that no individual reload() calls were made for sparse=True
        # Should be only 1 request for the list endpoint
        self.assertEqual(len(mock.request_history), 1)
        self.assertEqual(
            mock.request_history[0].url, tests.COMPATIBLE_URL.lower() + "/containers/json"
        )

    @requests_mock.Mocker()
    def test_prune(self, mock):
        mock.post(
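The behavior exercised by the sparse-listing tests above, summarized as a hedged usage sketch:

    # default: a single request returning sparse Container objects
    containers = client.containers.list()

    # sparse=False (and the Docker-compatible endpoint by default) triggers a reload() per container
    detailed = client.containers.list(sparse=False)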
@@ -228,8 +364,8 @@ class ContainersManagerTestCase(unittest.TestCase):
            json=FIRST_CONTAINER,
        )

        port_str = {'2233': 3333}
        port_str_protocol = {'2244/tcp': 3344}
        port_str = {"2233": 3333}
        port_str_protocol = {"2244/tcp": 3344}
        port_int = {2255: 3355}
        ports = {**port_str, **port_str_protocol, **port_int}
        self.client.containers.create("fedora", "/usr/bin/ls", ports=ports)

@@ -237,26 +373,74 @@ class ContainersManagerTestCase(unittest.TestCase):
        self.client.containers.client.post.assert_called()
        expected_ports = [
            {
                'container_port': 2233,
                'host_port': 3333,
                'protocol': 'tcp',
                "container_port": 2233,
                "host_port": 3333,
                "protocol": "tcp",
            },
            {
                'container_port': 2244,
                'host_port': 3344,
                'protocol': 'tcp',
                "container_port": 2244,
                "host_port": 3344,
                "protocol": "tcp",
            },
            {
                'container_port': 2255,
                'host_port': 3355,
                'protocol': 'tcp',
                "container_port": 2255,
                "host_port": 3355,
                "protocol": "tcp",
            },
        ]
        actual_ports = json.loads(self.client.containers.client.post.call_args[1]['data'])[
            'portmappings'
        actual_ports = json.loads(self.client.containers.client.post.call_args[1]["data"])[
            "portmappings"
        ]
        self.assertEqual(expected_ports, actual_ports)

    @requests_mock.Mocker()
    def test_create_userns_mode_simple(self, mock):
        mock_response = MagicMock()
        mock_response.json = lambda: {
            "Id": "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd",
            "Size": 1024,
        }
        self.client.containers.client.post = MagicMock(return_value=mock_response)
        mock.get(
            tests.LIBPOD_URL
            + "/containers/87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd/json",
            json=FIRST_CONTAINER,
        )

        userns = "keep-id"
        self.client.containers.create("fedora", "/usr/bin/ls", userns_mode=userns)
        self.client.containers.client.post.assert_called()
        expected_userns = {"nsmode": userns}

        actual_userns = json.loads(self.client.containers.client.post.call_args[1]["data"])[
            "userns"
        ]
        self.assertEqual(expected_userns, actual_userns)

    @requests_mock.Mocker()
    def test_create_userns_mode_dict(self, mock):
        mock_response = MagicMock()
        mock_response.json = lambda: {
            "Id": "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd",
            "Size": 1024,
        }
        self.client.containers.client.post = MagicMock(return_value=mock_response)
        mock.get(
            tests.LIBPOD_URL
            + "/containers/87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd/json",
            json=FIRST_CONTAINER,
        )

        userns = {"nsmode": "keep-id", "value": "uid=900"}
        self.client.containers.create("fedora", "/usr/bin/ls", userns_mode=userns)
        self.client.containers.client.post.assert_called()
        expected_userns = dict(**userns)

        actual_userns = json.loads(self.client.containers.client.post.call_args[1]["data"])[
            "userns"
        ]
        self.assertEqual(expected_userns, actual_userns)

    def test_create_unsupported_key(self):
        with self.assertRaises(TypeError):
            self.client.containers.create("fedora", "/usr/bin/ls", blkio_weight=100.0)

@@ -265,6 +449,127 @@ class ContainersManagerTestCase(unittest.TestCase):
        with self.assertRaises(TypeError):
            self.client.containers.create("fedora", "/usr/bin/ls", unknown_key=100.0)

    @requests_mock.Mocker()
    def test_create_convert_env_list_to_dict(self, mock):
        env_list1 = ["FOO=foo", "BAR=bar"]
        # Test valid list
        converted_dict1 = {"FOO": "foo", "BAR": "bar"}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list1), converted_dict1)

        # Test empty string
        env_list2 = ["FOO=foo", ""]
        self.assertRaises(ValueError, CreateMixin._convert_env_list_to_dict, env_list2)

        # Test non iterable
        env_list3 = ["FOO=foo", None]
        self.assertRaises(TypeError, CreateMixin._convert_env_list_to_dict, env_list3)

        # Test iterable with non string element
        env_list4 = ["FOO=foo", []]
        self.assertRaises(TypeError, CreateMixin._convert_env_list_to_dict, env_list4)

        # Test empty list
        env_list5 = []
        converted_dict5 = {}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list5), converted_dict5)

        # Test single valid environment variable
        env_list6 = ["SINGLE=value"]
        converted_dict6 = {"SINGLE": "value"}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list6), converted_dict6)

        # Test environment variable with empty value
        env_list7 = ["EMPTY="]
        converted_dict7 = {"EMPTY": ""}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list7), converted_dict7)

        # Test environment variable with multiple equals signs
        env_list8 = ["URL=https://example.com/path?param=value"]
        converted_dict8 = {"URL": "https://example.com/path?param=value"}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list8), converted_dict8)

        # Test environment variable with spaces in value
        env_list9 = ["MESSAGE=Hello World", "PATH=/usr/local/bin:/usr/bin"]
        converted_dict9 = {"MESSAGE": "Hello World", "PATH": "/usr/local/bin:/usr/bin"}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list9), converted_dict9)

        # Test environment variable with special characters
        env_list10 = ["SPECIAL=!@#$%^&*()_+-=[]{}|;':\",./<>?"]
        converted_dict10 = {"SPECIAL": "!@#$%^&*()_+-=[]{}|;':\",./<>?"}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list10), converted_dict10)

        # Test environment variable with numeric values
        env_list11 = ["PORT=8080", "TIMEOUT=30"]
        converted_dict11 = {"PORT": "8080", "TIMEOUT": "30"}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list11), converted_dict11)

        # Test environment variable with boolean-like values
        env_list12 = ["DEBUG=true", "VERBOSE=false", "ENABLED=1", "DISABLED=0"]
        converted_dict12 = {
            "DEBUG": "true",
            "VERBOSE": "false",
            "ENABLED": "1",
            "DISABLED": "0",
        }
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list12), converted_dict12)

        # Test environment variable with whitespace in key (should preserve)
        env_list13 = [" SPACED_KEY =value", "KEY= spaced_value "]
        converted_dict13 = {" SPACED_KEY ": "value", "KEY": " spaced_value "}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list13), converted_dict13)

        # Test missing equals sign
        env_list14 = ["FOO=foo", "INVALID"]
        self.assertRaises(ValueError, CreateMixin._convert_env_list_to_dict, env_list14)

        # Test environment variable with only equals sign (empty key)
        env_list15 = ["FOO=foo", "=value"]
        self.assertRaises(ValueError, CreateMixin._convert_env_list_to_dict, env_list15)

        # Test environment variable with only whitespace key
        env_list16 = ["FOO=foo", " =value"]
        self.assertRaises(ValueError, CreateMixin._convert_env_list_to_dict, env_list16)

        # Test whitespace-only string
        env_list17 = ["FOO=foo", " "]
        self.assertRaises(ValueError, CreateMixin._convert_env_list_to_dict, env_list17)

        # Test various non-string types in list
        env_list18 = ["FOO=foo", 123]
        self.assertRaises(TypeError, CreateMixin._convert_env_list_to_dict, env_list18)

        env_list19 = ["FOO=foo", {"key": "value"}]
        self.assertRaises(TypeError, CreateMixin._convert_env_list_to_dict, env_list19)

        env_list20 = ["FOO=foo", True]
        self.assertRaises(TypeError, CreateMixin._convert_env_list_to_dict, env_list20)

        # Test duplicate keys (last one should win)
        env_list21 = ["KEY=first", "KEY=second", "OTHER=value"]
        converted_dict21 = {"KEY": "second", "OTHER": "value"}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list21), converted_dict21)

        # Test very long environment variable
        long_value = "x" * 1000
        env_list22 = [f"LONG_VAR={long_value}"]
        converted_dict22 = {"LONG_VAR": long_value}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list22), converted_dict22)

        # Test environment variable with newlines and tabs
        env_list23 = ["MULTILINE=line1\nline2\ttabbed"]
        converted_dict23 = {"MULTILINE": "line1\nline2\ttabbed"}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list23), converted_dict23)

        # Test environment variable with unicode characters
        env_list24 = ["UNICODE=こんにちは", "EMOJI=🚀🌟"]
        converted_dict24 = {"UNICODE": "こんにちは", "EMOJI": "🚀🌟"}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list24), converted_dict24)

        # Test case sensitivity
        env_list25 = ["path=/usr/bin", "PATH=/usr/local/bin"]
        converted_dict25 = {"path": "/usr/bin", "PATH": "/usr/local/bin"}
        self.assertEqual(CreateMixin._convert_env_list_to_dict(env_list25), converted_dict25)

    @requests_mock.Mocker()
    def test_run_detached(self, mock):
        mock.post(
@@ -327,7 +632,7 @@ class ContainersManagerTestCase(unittest.TestCase):

        actual = self.client.containers.run("fedora", "/usr/bin/ls")
        self.assertIsInstance(actual, bytes)
        self.assertEqual(actual, b'This is a unittest - line 1This is a unittest - line 2')
        self.assertEqual(actual, b"This is a unittest - line 1This is a unittest - line 2")

        # iter() cannot be reset so subtests used to create new instance
        with self.subTest("Stream results"):

@@ -340,5 +645,5 @@ class ContainersManagerTestCase(unittest.TestCase):
            self.assertEqual(next(actual), b"This is a unittest - line 2")


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()
@@ -0,0 +1,84 @@
import unittest

import requests_mock

from podman import PodmanClient, tests


CONTAINER = {
    "Id": "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd",
    "Name": "quay.io/fedora:latest",
    "Image": "eloquent_pare",
    "State": {"Status": "running"},
}


class PodmanResourceTestCase(unittest.TestCase):
    """Test PodmanResource area of concern."""

    def setUp(self) -> None:
        super().setUp()

        self.client = PodmanClient(base_url=tests.BASE_SOCK)

    def tearDown(self) -> None:
        super().tearDown()

        self.client.close()

    @requests_mock.Mocker()
    def test_reload_with_compatible_options(self, mock):
        """Test that reload uses the correct endpoint."""

        # Mock the get() call
        mock.get(
            f"{tests.LIBPOD_URL}/"
            f"containers/87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd/json",
            json=CONTAINER,
        )

        # Mock the reload() call
        mock.get(
            f"{tests.LIBPOD_URL}/"
            f"containers/87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd/json",
            json=CONTAINER,
        )

        # Mock the reload(compatible=False) call
        mock.get(
            f"{tests.LIBPOD_URL}/"
            f"containers/87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd/json",
            json=CONTAINER,
        )

        # Mock the reload(compatible=True) call
        mock.get(
            f"{tests.COMPATIBLE_URL}/"
            f"containers/87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd/json",
            json=CONTAINER,
        )

        container = self.client.containers.get(
            "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd"
        )
        container.reload()
        container.reload(compatible=False)
        container.reload(compatible=True)

        self.assertEqual(len(mock.request_history), 4)
        for i in range(3):
            self.assertEqual(
                mock.request_history[i].url,
                tests.LIBPOD_URL.lower()
                + "/containers/"
                + "87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd/json",
            )
        self.assertEqual(
            mock.request_history[3].url,
            tests.COMPATIBLE_URL.lower()
            + "/containers/87e1325c82424e49a00abdd4de08009eb76c7de8d228426a9b8af9318ced5ecd/json",
        )


if __name__ == '__main__':
    unittest.main()
@@ -517,7 +517,7 @@ class ImagesManagerTestCase(unittest.TestCase):
        self.assertEqual(report[0]["name"], "quay.io/libpod/fedora")

    @requests_mock.Mocker()
    def test_search_listTags(self, mock):
    def test_search_list_tags(self, mock):
        mock.get(
            tests.LIBPOD_URL + "/images/search?term=fedora&noTrunc=true&listTags=true",
            json=[

@@ -649,6 +649,27 @@ class ImagesManagerTestCase(unittest.TestCase):
            images[1].id, "c4b16966ecd94ffa910eab4e630e24f259bf34a87e924cd4b1434f267b0e354e"
        )

    @requests_mock.Mocker()
    def test_pull_policy(self, mock):
        image_id = "sha256:326dd9d7add24646a325e8eaa82125294027db2332e49c5828d96312c5d773ab"
        mock.post(
            tests.LIBPOD_URL + "/images/pull?reference=quay.io%2ffedora%3Alatest&policy=missing",
            json={
                "error": "",
                "id": image_id,
                "images": [image_id],
                "stream": "",
            },
        )
        mock.get(
            tests.LIBPOD_URL + "/images"
            "/sha256%3A326dd9d7add24646a325e8eaa82125294027db2332e49c5828d96312c5d773ab/json",
            json=FIRST_IMAGE,
        )

        image = self.client.images.pull("quay.io/fedora:latest", policy="missing")
        self.assertEqual(image.id, image_id)

    @requests_mock.Mocker()
    def test_list_with_name_parameter(self, mock):
        """Test that name parameter is correctly converted to a reference filter"""
@@ -1,8 +1,15 @@
import unittest

import requests_mock

from podman import PodmanClient, tests
from podman.domain.manifests import Manifest, ManifestsManager

FIRST_MANIFEST = {
    "Id": "326dd9d7add24646a389e8eaa82125294027db2332e49c5828d96312c5d773ab",
    "names": "quay.io/fedora:latest",
}


class ManifestTestCase(unittest.TestCase):
    def setUp(self) -> None:

@@ -23,6 +30,34 @@ class ManifestTestCase(unittest.TestCase):
        manifest = Manifest()
        self.assertIsNone(manifest.name)

    @requests_mock.Mocker()
    def test_push(self, mock):
        adapter = mock.post(
            tests.LIBPOD_URL + "/manifests/quay.io%2Ffedora%3Alatest/registry/quay.io%2Ffedora%3Av1"
        )

        manifest = Manifest(attrs=FIRST_MANIFEST, client=self.client.api)
        manifest.push(destination="quay.io/fedora:v1")

        self.assertTrue(adapter.called_once)

    @requests_mock.Mocker()
    def test_push_with_auth(self, mock):
        adapter = mock.post(
            tests.LIBPOD_URL
            + "/manifests/quay.io%2Ffedora%3Alatest/registry/quay.io%2Ffedora%3Av1",
            request_headers={
                "X-Registry-Auth": b"eyJ1c2VybmFtZSI6ICJ1c2VyIiwgInBhc3N3b3JkIjogInBhc3MifQ=="
            },
        )

        manifest = Manifest(attrs=FIRST_MANIFEST, client=self.client.api)
        manifest.push(
            destination="quay.io/fedora:v1", auth_config={"username": "user", "password": "pass"}
        )

        self.assertTrue(adapter.called_once)


if __name__ == '__main__':
    unittest.main()
@@ -13,7 +13,7 @@ from podman import api


class ParseUtilsTestCase(unittest.TestCase):
    def test_parse_repository(self):
    def test_parse_repository(self) -> None:
        @dataclass
        class TestCase:
            name: str

@@ -29,14 +29,19 @@ class ParseUtilsTestCase(unittest.TestCase):
            ),
            TestCase(
                name="@digest",
                input="quay.io/libpod/testimage@71f1b47263fc",
                expected=("quay.io/libpod/testimage", "71f1b47263fc"),
                input="quay.io/libpod/testimage@sha256:71f1b47263fc",
                expected=("quay.io/libpod/testimage@sha256", "71f1b47263fc"),
            ),
            TestCase(
                name=":tag",
                input="quay.io/libpod/testimage:latest",
                expected=("quay.io/libpod/testimage", "latest"),
            ),
            TestCase(
                name=":tag@digest",
                input="quay.io/libpod/testimage:latest@sha256:71f1b47263fc",
                expected=("quay.io/libpod/testimage:latest@sha256", "71f1b47263fc"),
            ),
            TestCase(
                name=":port",
                input="quay.io:5000/libpod/testimage",

@@ -44,14 +49,19 @@ class ParseUtilsTestCase(unittest.TestCase):
            ),
            TestCase(
                name=":port@digest",
                input="quay.io:5000/libpod/testimage@71f1b47263fc",
                expected=("quay.io:5000/libpod/testimage", "71f1b47263fc"),
                input="quay.io:5000/libpod/testimage@sha256:71f1b47263fc",
                expected=("quay.io:5000/libpod/testimage@sha256", "71f1b47263fc"),
            ),
            TestCase(
                name=":port:tag",
                input="quay.io:5000/libpod/testimage:latest",
                expected=("quay.io:5000/libpod/testimage", "latest"),
            ),
            TestCase(
                name=":port:tag:digest",
                input="quay.io:5000/libpod/testimage:latest@sha256:71f1b47263fc",
                expected=("quay.io:5000/libpod/testimage:latest@sha256", "71f1b47263fc"),
            ),
        ]

        for case in cases:

@@ -62,13 +72,13 @@ class ParseUtilsTestCase(unittest.TestCase):
                f"failed test {case.name} expected {case.expected}, actual {actual}",
            )

    def test_decode_header(self):
    def test_decode_header(self) -> None:
        actual = api.decode_header("eyJIZWFkZXIiOiJ1bml0dGVzdCJ9")
        self.assertDictEqual(actual, {"Header": "unittest"})

        self.assertDictEqual(api.decode_header(None), {})

    def test_prepare_timestamp(self):
    def test_prepare_timestamp(self) -> None:
        time = datetime.datetime(2022, 1, 24, 12, 0, 0)
        self.assertEqual(api.prepare_timestamp(time), 1643025600)
        self.assertEqual(api.prepare_timestamp(2), 2)

@@ -77,11 +87,11 @@ class ParseUtilsTestCase(unittest.TestCase):
        with self.assertRaises(ValueError):
            api.prepare_timestamp("bad input")  # type: ignore

    def test_prepare_cidr(self):
    def test_prepare_cidr(self) -> None:
        net = ipaddress.IPv4Network("127.0.0.0/24")
        self.assertEqual(api.prepare_cidr(net), ("127.0.0.0", "////AA=="))

    def test_stream_helper(self):
    def test_stream_helper(self) -> None:
        streamed_results = [b'{"test":"val1"}', b'{"test":"val2"}']
        mock_response = mock.Mock(spec=Response)
        mock_response.iter_lines.return_value = iter(streamed_results)

@@ -93,7 +103,7 @@ class ParseUtilsTestCase(unittest.TestCase):
            self.assertIsInstance(actual, bytes)
            self.assertEqual(expected, actual)

    def test_stream_helper_with_decode(self):
    def test_stream_helper_with_decode(self) -> None:
        streamed_results = [b'{"test":"val1"}', b'{"test":"val2"}']
        mock_response = mock.Mock(spec=Response)
        mock_response.iter_lines.return_value = iter(streamed_results)

@@ -103,7 +113,7 @@ class ParseUtilsTestCase(unittest.TestCase):
        self.assertIsInstance(streamable, Iterable)
        for expected, actual in zip(streamed_results, streamable):
            self.assertIsInstance(actual, dict)
            self.assertDictEqual(json.loads(expected), actual)
            self.assertDictEqual(json.loads(expected), actual)  # type: ignore[arg-type]


if __name__ == '__main__':
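A short sketch of the parsing behavior the updated test cases above pin down: api.parse_repository() splits a reference into name and tag/digest:

    from podman import api

    api.parse_repository("quay.io/libpod/testimage:latest")
    # -> ("quay.io/libpod/testimage", "latest")
    api.parse_repository("quay.io/libpod/testimage@sha256:71f1b47263fc")
    # -> ("quay.io/libpod/testimage@sha256", "71f1b47263fc")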
@@ -149,7 +149,7 @@ class PodTestCase(unittest.TestCase):
    def test_stop(self, mock):
        adapter = mock.post(
            tests.LIBPOD_URL
            + "/pods/c8b9f5b17dc1406194010c752fc6dcb330192032e27648db9b14060447ecf3b8/stop?t=70.0",
            + "/pods/c8b9f5b17dc1406194010c752fc6dcb330192032e27648db9b14060447ecf3b8/stop?t=70",
            json={
                "Errs": [],
                "Id": "c8b9f5b17dc1406194010c752fc6dcb330192032e27648db9b14060447ecf3b8",

@@ -157,7 +157,7 @@ class PodTestCase(unittest.TestCase):
        )

        pod = Pod(attrs=FIRST_POD, client=self.client.api)
        pod.stop(timeout=70.0)
        pod.stop(timeout=70)
        self.assertTrue(adapter.called_once)

    @requests_mock.Mocker()
@@ -0,0 +1,44 @@
import unittest

from unittest.mock import patch, MagicMock

from podman.tests import utils


class TestPodmanVersion(unittest.TestCase):
    @patch('podman.tests.utils.subprocess.Popen')
    def test_podman_version(self, mock_popen):
        mock_proc = MagicMock()
        mock_proc.stdout.read.return_value = b'5.6.0'
        mock_popen.return_value.__enter__.return_value = mock_proc
        self.assertEqual(utils.podman_version(), (5, 6, 0))

    @patch('podman.tests.utils.subprocess.Popen')
    def test_podman_version_dev(self, mock_popen):
        mock_proc = MagicMock()
        mock_proc.stdout.read.return_value = b'5.6.0-dev'
        mock_popen.return_value.__enter__.return_value = mock_proc
        self.assertEqual(utils.podman_version(), (5, 6, 0))

    @patch('podman.tests.utils.subprocess.Popen')
    def test_podman_version_four_digits(self, mock_popen):
        mock_proc = MagicMock()
        mock_proc.stdout.read.return_value = b'5.6.0.1'
        mock_popen.return_value.__enter__.return_value = mock_proc
        self.assertEqual(utils.podman_version(), (5, 6, 0))

    @patch('podman.tests.utils.subprocess.Popen')
    def test_podman_version_release_candidate(self, mock_popen):
        mock_proc = MagicMock()
        mock_proc.stdout.read.return_value = b'5.6.0-rc1'
        mock_popen.return_value.__enter__.return_value = mock_proc
        self.assertEqual(utils.podman_version(), (5, 6, 0))

    @patch('podman.tests.utils.subprocess.Popen')
    def test_podman_version_none(self, mock_popen):
        mock_proc = MagicMock()
        mock_proc.stdout.read.return_value = b''
        mock_popen.return_value.__enter__.return_value = mock_proc
        with self.assertRaises(RuntimeError) as context:
            utils.podman_version()
        self.assertEqual(str(context.exception), "Unable to detect podman version. Got \"\"")
@@ -39,6 +39,13 @@ class VolumeTestCase(unittest.TestCase):
        volume.remove(force=True)
        self.assertTrue(adapter.called_once)

    @requests_mock.Mocker()
    def test_inspect(self, mock):
        mock.get(tests.LIBPOD_URL + "/volumes/dbase/json?tlsVerify=False", json=FIRST_VOLUME)
        vol_manager = VolumesManager(self.client.api)
        actual = vol_manager.prepare_model(attrs=FIRST_VOLUME)
        self.assertEqual(actual.inspect(tls_verify=False)["Mountpoint"], "/var/database")


if __name__ == '__main__':
    unittest.main()
@@ -0,0 +1,32 @@
import pathlib
import csv
import re
import subprocess

try:
    from platform import freedesktop_os_release
except ImportError:

    def freedesktop_os_release() -> dict[str, str]:
        """This is a fallback for platforms that don't have the freedesktop_os_release function.
        Python < 3.10
        """
        path = pathlib.Path("/etc/os-release")
        with open(path) as f:
            reader = csv.reader(f, delimiter="=")
            return dict(reader)


def podman_version() -> tuple[int, ...]:
    cmd = ["podman", "info", "--format", "{{.Version.Version}}"]
    with subprocess.Popen(cmd, stdout=subprocess.PIPE) as proc:
        version = proc.stdout.read().decode("utf-8").strip()
        match = re.match(r"(\d+\.\d+\.\d+)", version)
        if not match:
            raise RuntimeError(f"Unable to detect podman version. Got \"{version}\"")
        version = match.group(1)
        return tuple(int(x) for x in version.split("."))


OS_RELEASE = freedesktop_os_release()
PODMAN_VERSION = podman_version()
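Hedged sketch of how the new PODMAN_VERSION constant is used by the integration tests in this compare (the test name below is hypothetical):

    import pytest
    from podman.tests.utils import PODMAN_VERSION

    @pytest.mark.skipif(PODMAN_VERSION < (5, 6, 0), reason="needs Podman 5.6.0 or newer")
    def test_new_behavior():
        ...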
@@ -1,4 +1,4 @@
"""Version of PodmanPy."""

__version__ = "5.4.0"
__version__ = "5.6.0"
__compatible_version__ = "1.40"
@@ -4,7 +4,9 @@ build-backend = "setuptools.build_meta"

[project]
name = "podman"
dynamic = ["version"]
# TODO: remove the line version = ... on podman-py > 5.4.0 releases
# dynamic = ["version"]
version = "5.6.0"
description = "Bindings for Podman RESTful API"
readme = "README.md"
license = {file = "LICENSE"}

@@ -65,12 +67,17 @@ log_cli = true
log_cli_level = "DEBUG"
log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)"
log_cli_date_format = "%Y-%m-%d %H:%M:%S"
testpaths = [
  "podman/tests",
]

[tool.setuptools]
packages = ["podman"]
[tool.setuptools.packages.find]
where = ["."]
include = ["podman*"]

[tool.setuptools.dynamic]
version = {attr = "podman.version.__version__"}
# TODO: remove the line version = ... on podman-py > 5.4.0 releases
# [tool.setuptools.dynamic]
# version = {attr = "podman.version.__version__"}

[tool.ruff]
line-length = 100

@@ -97,10 +104,10 @@ select = [
  "E",   # Pycodestyle Error
  "W",   # Pycodestyle Warning
  "N",   # PEP8 Naming
  # TODO "UP", # Pyupgrade
  "UP",  # Pyupgrade
  # TODO "ANN",
  # TODO "S", # Bandit
  # "B", # Bugbear
  "B",   # Bugbear
  "A",   # flake-8-builtins
  "YTT", # flake-8-2020
  "PLC", # Pylint Convention

@@ -110,20 +117,48 @@ select = [
# Some checks should be enabled for code sanity disabled now
# to avoid changing too many lines
ignore = [
  "F821",    # TODO Undefined name
  "F541",    # TODO f-string is missing placeholders
  "F401",    # TODO Module imported but unused
  "F841",    # TODO Local variable is assigned to but never used
  "E402",    # TODO Module level import not at top of file
  "E741",    # TODO ambiguous variable name
  "E722",    # TODO do not use bare 'except'
  "E501",    # TODO line too long
  "N818",    # TODO Error Suffix in exception name
  "N80",     # TODO Invalid Name
  "ANN10",   # Missing type annotation
  "PLW2901", # TODO Redefined Loop Name
]
[tool.ruff.lint.flake8-builtins]
builtins-ignorelist = ["copyright", "all"]
[tool.ruff.lint.per-file-ignores]
"podman/tests/*.py" = ["S"]

[tool.mypy]
install_types = true
non_interactive = true
allow_redefinition = true
no_strict_optional = true
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = [
  "podman.api.adapter_utils",
  "podman.api.client",
  "podman.api.ssh",
  "podman.api.tar_utils",
  "podman.api.uds",
  "podman.domain.config",
  "podman.domain.containers",
  "podman.domain.containers_create",
  "podman.domain.containers_run",
  "podman.domain.events",
  "podman.domain.images_build",
  "podman.domain.images_manager",
  "podman.domain.manager",
  "podman.domain.manifests",
  "podman.domain.networks",
  "podman.domain.networks_manager",
  "podman.domain.pods",
  "podman.domain.pods_manager",
  "podman.domain.registry_data",
  "podman.domain.secrets",
  "podman.domain.volumes",
  "podman.errors.exceptions"
]
ignore_errors = true

[tool.coverage.report]
exclude_also = [
  "unittest.main()",
]
@@ -1,6 +1,6 @@
[metadata]
name = podman
version = 5.4.0
version = 5.6.0
author = Brent Baude, Jhon Honce, Urvashi Mohnani, Nicola Sella
author_email = jhonce@redhat.com
description = Bindings for Podman RESTful API
2 setup.py

@@ -9,7 +9,7 @@ excluded = [
]


class build_py(build_py_orig):
class build_py(build_py_orig):  # noqa: N801
    def find_package_modules(self, package, package_dir):
        modules = super().find_package_modules(package, package_dir)
        return [
@@ -8,12 +8,12 @@ require:
    test: cd .. && make lint

/coverage_integration:
    tag: [ stable, coverage ]
    tag: [ stable, coverage, integration ]
    summary: Run integration tests coverage check
    test: cd .. && make integration

/coverage_unittest:
    tag: [ stable, coverage ]
    tag: [ stable, coverage, unittest ]
    summary: Run unit tests coverage check
    test: cd .. && make unittest

@@ -24,8 +24,14 @@ require:
    test: cd .. && make tests-ci-base-python
    duration: 10m

/base_python_pnext:
    tag: [ pnext ]
    summary: Run all tests on the base python version and podman-next
    test: cd .. && make tests-ci-base-python-podman-next
    duration: 5m

/all_python:
    tag: [ matrix]
    summary: Run all tests for all ptyhon versions available
    tag: [ matrix ]
    summary: Run all tests for all python versions available
    test: cd .. && make tests-ci-all-python
    duration: 10m
    duration: 20m
9 tox.ini

@@ -17,7 +17,7 @@ setenv =
commands = {posargs}

[testenv:lint]
deps = ruff==0.8.1
deps = ruff==0.12.8
allowlist_externals = ruff
commands = ruff check --diff

@@ -29,7 +29,7 @@ commands =
    coverage report -m --skip-covered --fail-under=80 --omit=podman/tests/* --omit=.tox/*

[testenv:format]
deps = ruff==0.8.1
deps = ruff==0.12.8
allowlist_externals = ruff
commands =
    ruff format --diff

@@ -40,3 +40,8 @@ commands =
deps = black
commands =
    black {posargs} .

[testenv:mypy]
deps = mypy
commands =
    mypy --package podman