From 88a82557a872e46fe2fa914388798f65c68f9233 Mon Sep 17 00:00:00 2001 From: Xiao Yu Date: Mon, 6 Dec 2021 23:36:27 +0000 Subject: [PATCH 01/11] Support Docker running under CGroups v1 and v2 Docker supports either CGroups v1 or v2 and while running under v2 the memory stat details change slightly. To account for FS cache we need to subtract out `memory_stats.stats.inactive_file` under v2 instead of `memory_stats.stats.total_cache` under v1. We found this discrepancy when running the latest `dockerd` on Debian Bullseye. --- check_docker/check_docker.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/check_docker/check_docker.py b/check_docker/check_docker.py index 1a9d754..37bdd5d 100755 --- a/check_docker/check_docker.py +++ b/check_docker/check_docker.py @@ -520,7 +520,14 @@ def check_memory(container, thresholds): inspection = get_stats(container) # Subtracting cache to match what `docker stats` does. - adjusted_usage = inspection['memory_stats']['usage'] - inspection['memory_stats']['stats']['total_cache'] + adjusted_usage = inspection['memory_stats']['usage'] + if 'total_cache' in inspection['memory_stats']['stats']: + # CGroups v1 - https://www.kernel.org/doc/Documentation/cgroup-v1/memory.txt + adjusted_usage -= inspection['memory_stats']['stats']['total_cache'] + elif 'inactive_file' in inspection['memory_stats']['stats']: + # CGroups v2 - https://www.kernel.org/doc/Documentation/cgroup-v2.txt + adjusted_usage -= inspection['memory_stats']['stats']['inactive_file'] + if thresholds.units == '%': max = 100 usage = int(100 * adjusted_usage / inspection['memory_stats']['limit']) From c80247435b72a579c5b886f54db25ae2f471854e Mon Sep 17 00:00:00 2001 From: Markus Opolka Date: Mon, 30 Oct 2023 14:57:17 +0100 Subject: [PATCH 02/11] Update gitignore --- .gitignore | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/.gitignore b/.gitignore index 8f947c2..66f2d09 100644 --- a/.gitignore +++ b/.gitignore @@ -83,23 
+83,14 @@ celerybeat-schedule venv/ ENV/ -# Spyder project settings +# Editors .spyderproject - -# Rope project settings .ropeproject - -.idea - - -## File-based project format: *.iws - -## Plugin-specific files: - -# IntelliJ +.idea /out/ - +\#* +.\#* # JIRA plugin atlassian-ide-plugin.xml From b98ea384c63b64568ee6984e4c9409bc6ec43695 Mon Sep 17 00:00:00 2001 From: Cyberes <64224601+Cyberes@users.noreply.github.com> Date: Wed, 3 May 2023 12:28:50 -0600 Subject: [PATCH 03/11] Add Docker daemon connection check --- check_docker/check_docker.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/check_docker/check_docker.py b/check_docker/check_docker.py index 37bdd5d..eb7b105 100755 --- a/check_docker/check_docker.py +++ b/check_docker/check_docker.py @@ -941,8 +941,12 @@ def perform_checks(raw_args): return # Here is where all the work happens - ############################################################################################# - containers = get_containers(args.containers, args.present) + try: + containers = get_containers(args.containers, args.present) + except URLError as e: + critical(f'Failed to connect to daemon: {e.reason}.') + print_results() + exit(rc) if len(containers) == 0 and not args.present: unknown("No containers names found matching criteria") From e78f0475aa2952d59fbec78b2e4dcbb03eebb439 Mon Sep 17 00:00:00 2001 From: Markus Opolka Date: Mon, 30 Oct 2023 15:14:12 +0100 Subject: [PATCH 04/11] Fix issue with missing oauth fields --- check_docker/check_docker.py | 10 +++++----- tests/test_check_docker.py | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/check_docker/check_docker.py b/check_docker/check_docker.py index eb7b105..3eca5d5 100755 --- a/check_docker/check_docker.py +++ b/check_docker/check_docker.py @@ -138,13 +138,13 @@ def http_response(self, request, response): https_response = http_response @staticmethod - def _get_outh2_token(www_authenticate_header): + def 
_get_oauth2_token(www_authenticate_header): auth_fields = dict(re.findall(r"""(?:(?P[^ ,=]+)="([^"]+)")""", www_authenticate_header)) auth_url = "{realm}?scope={scope}&service={service}".format( - realm=auth_fields['realm'], - scope=auth_fields['scope'], - service=auth_fields['service'], + realm=auth_fields.get('realm'), + scope=auth_fields.get('scope'), + service=auth_fields.get('service'), ) token_request = Request(auth_url) token_request.add_header("Content-Type", "application/x-www-form-urlencoded; charset=utf-8") @@ -160,7 +160,7 @@ def process_oauth2(self, request, response, www_authenticate_header): raise HTTPError(full_url, 401, "Stopping Oauth2 failure loop for {}".format(full_url), response.headers, response) - auth_token = self._get_outh2_token(www_authenticate_header) + auth_token = self._get_oauth2_token(www_authenticate_header) request.add_unredirected_header('Authorization', 'Bearer ' + auth_token) return self.parent.open(request, timeout=request.timeout) diff --git a/tests/test_check_docker.py b/tests/test_check_docker.py index f3c4324..a990164 100644 --- a/tests/test_check_docker.py +++ b/tests/test_check_docker.py @@ -90,7 +90,7 @@ def test_get_url_with_oauth2(check_docker): headers={'test': 'test'}) with patch('check_docker.check_docker.HTTPSHandler.https_open', side_effect=[mock_response1, mock_response2]), \ - patch('check_docker.check_docker.Oauth2TokenAuthHandler._get_outh2_token', + patch('check_docker.check_docker.Oauth2TokenAuthHandler._get_oauth2_token', return_value='test_token') as get_token: response = check_docker.get_url(url='https://example.com/test') assert response == ({"test_key": "test_value"}, 200) @@ -106,7 +106,7 @@ def mock_open(*args, **kwargs): return mock_response with patch('check_docker.check_docker.HTTPSHandler.https_open', side_effect=mock_open), \ - patch('check_docker.check_docker.Oauth2TokenAuthHandler._get_outh2_token', + patch('check_docker.check_docker.Oauth2TokenAuthHandler._get_oauth2_token', 
return_value='test_token') as get_token: with pytest.raises(HTTPError): check_docker.get_url(url='https://example.com/test') @@ -832,7 +832,7 @@ def test_get_manifest_auth_token(check_docker): expected_response = FakeHttpResponse(content=encoded, http_code=200) with patch('check_docker.check_docker.request.urlopen', return_value=expected_response): www_authenticate_header = 'Bearer realm="https://example.com/token",service="example.com",scope="repository:test:pull"' - token = check_docker.Oauth2TokenAuthHandler._get_outh2_token(www_authenticate_header) + token = check_docker.Oauth2TokenAuthHandler._get_oauth2_token(www_authenticate_header) assert token == 'test' From 39da9197d8c13ac28822aed3386ef8dbe90a880a Mon Sep 17 00:00:00 2001 From: Markus Opolka Date: Mon, 26 Jan 2026 10:04:45 +0100 Subject: [PATCH 05/11] Remove Travis and Codeclimate Moving to GitHub Actions for this --- .codeclimate.yml | 13 ------------- .gitignore | 3 ++- .travis.yml | 14 -------------- 3 files changed, 2 insertions(+), 28 deletions(-) delete mode 100644 .codeclimate.yml delete mode 100644 .travis.yml diff --git a/.codeclimate.yml b/.codeclimate.yml deleted file mode 100644 index 189a543..0000000 --- a/.codeclimate.yml +++ /dev/null @@ -1,13 +0,0 @@ -languages: - Ruby: true - JavaScript: true - PHP: true - Python: true -exclude_paths: -- "check_docker/tests/*" -- "tests/*" -plugins: - radon: - enabled: true - sonar-python: - enabled: true \ No newline at end of file diff --git a/.gitignore b/.gitignore index 66f2d09..5e4d9a3 100644 --- a/.gitignore +++ b/.gitignore @@ -109,6 +109,7 @@ cr-sess1.json testing_tools/vagrant/.vagrant +.vagrant !check_docker/ -.DS_Store \ No newline at end of file +.DS_Store diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index c13df5f..0000000 --- a/.travis.yml +++ /dev/null @@ -1,14 +0,0 @@ -language: python -python: - - "3.5" - - "3.6" - - "3.7" - - "3.8" -install: - - pip install pipenv - - pipenv install - - pipenv install 
codeclimate-test-reporter -# command to run tests -script: - - py.test --cov=check_docker - - codeclimate-test-reporter || echo "Ignoring Code Climate reporter upload failure" From 75bc72ca9c10df4aba25218f041c9d3fdd961b6f Mon Sep 17 00:00:00 2001 From: Markus Opolka Date: Mon, 26 Jan 2026 10:21:42 +0100 Subject: [PATCH 06/11] Update Github Actions - Introduce requirements file - Add Makefile --- .github/dependabot.yml | 6 ++++ .github/workflows/tests.yml | 35 ++++++----------------- Makefile | 8 ++++++ Pipfile | 17 ------------ requirements-dev.txt | 4 +++ tests/test_version.py | 55 ------------------------------------- 6 files changed, 27 insertions(+), 98 deletions(-) create mode 100644 .github/dependabot.yml create mode 100644 Makefile delete mode 100644 Pipfile create mode 100644 requirements-dev.txt delete mode 100644 tests/test_version.py diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..ad45155 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: +- package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: monthly diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index bc48477..193ee2d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,4 +1,4 @@ -name: Run tests +name: CI on: push: @@ -7,35 +7,18 @@ on: branches: [ master ] jobs: - build: - + gitHubActionForPytest: runs-on: ubuntu-latest strategy: matrix: - python_version: [3.6, 3.7, 3.8] - + python-version: ["3.9", "3.10"] + name: GitHub Action steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python_version }} + - name: Checkout + uses: actions/checkout@v6 - name: Install dependencies run: | - python -V - printenv - python -m pip install --upgrade pip - pip install flake8 pytest coverage pyfakefs pytest-cov - - name: Lint with flake8 - run: | - # stop the build if there are Python 
syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Test with pytest + python -m pip install -r requirements-dev.txt + - name: Test and coverate run: | - pytest --cov=check_docker --cov-fail-under 90 --cov-report term --cov-report html - - uses: actions/upload-artifact@v2 - with: - name: coverage_report - path: htmlcov + make coverage diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..44a3f74 --- /dev/null +++ b/Makefile @@ -0,0 +1,8 @@ +.PHONY: lint test + +lint: + python -m pylint check_docker/ +test: + py.test -v +coverage: + py.test --cov=check_docker diff --git a/Pipfile b/Pipfile deleted file mode 100644 index 98780b4..0000000 --- a/Pipfile +++ /dev/null @@ -1,17 +0,0 @@ -[[source]] -name = "pypi" -url = "https://pypi.org/simple" -verify_ssl = true - -[packages] -tox = '*' -tox-pyenv = '*' -pytest = '*' -pytest-random-order = '*' -coverage = '>4.0,<4.4' -pyfakefs = '*' -pytest-cov = '<2.6' -poetry = "*" - -[requires] -python_version = "3.8" diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..df833b3 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,4 @@ +pylint==4.0.4 +pytest==9.0.2 +pytest-cov==7.0.0 +pyfakefs==6.0.0 diff --git a/tests/test_version.py b/tests/test_version.py deleted file mode 100644 index e348d97..0000000 --- a/tests/test_version.py +++ /dev/null @@ -1,55 +0,0 @@ -import os -import sys -from urllib import request -from urllib.error import HTTPError - -import pytest -import toml - -import check_docker as module -from check_docker import check_swarm, check_docker - - -def test_versions_match(): - assert check_docker.__version__ == check_swarm.__version__ - - -def test_module_version_matches(): - assert module.__version__ == check_docker.__version__ - - -def 
test_project_version_matches(): - project_config = toml.load("pyproject.toml") - project_version = project_config['tool']['poetry']['version'] - - assert project_version == check_docker.__version__ - - -@pytest.mark.skipif('isolated' in os.environ and os.environ['isolated'].lower != 'false', - reason="Can not reach Python packge index when isolated") -@pytest.mark.skipif(sys.version_info[0:2] != (3, 8), reason="Only check on python 3.8, not {}". - format(sys.version_info[0:2])) -def test_package_present(): - req = request.Request("https://pypi.org/project/check_docker/", method="HEAD") - with request.urlopen(req) as resp: - assert resp.getcode() == 200 - - -@pytest.mark.xfail('TRAVIS_BRANCH' in os.environ and os.environ['TRAVIS_BRANCH'].lower != 'master', - reason="Ignore version check outside of master") -@pytest.mark.xfail('GITHUB_HEAD_REF' in os.environ and os.environ['GITHUB_HEAD_REF'].lower != 'master', - reason="Ignore version check outside of master") -@pytest.mark.skipif('isolated' in os.environ and os.environ['isolated'].lower != 'false', - reason="Can not reach Python package index when isolated") -@pytest.mark.skipif(sys.version_info[0:2] != (3, 8), reason="Only check on python 3.8") -def test_ensure_new_version(): - version = check_docker.__version__ - req = request.Request("https://pypi.org/project/check_docker/{version}/". - format(version=version), method="HEAD") - - try: - with request.urlopen(req) as resp: - http_code = resp.getcode() - except HTTPError as e: - http_code = e.code - assert http_code == 404, "Version already exists. 
Ignore this if you are working on a PR" From f1058d30e2208f3362ff0eca1ac7740c45098e72 Mon Sep 17 00:00:00 2001 From: Markus Opolka Date: Mon, 26 Jan 2026 10:57:19 +0100 Subject: [PATCH 07/11] Change container list output to be newline separated MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This makes for a more readable output when there are many containers Co-authored-by: Dennis Mücklich <58838393+dmueckli@users.noreply.github.com> --- check_docker/check_docker.py | 4 ++-- tests/test_check_docker.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/check_docker/check_docker.py b/check_docker/check_docker.py index 3eca5d5..516c3e7 100755 --- a/check_docker/check_docker.py +++ b/check_docker/check_docker.py @@ -899,10 +899,10 @@ def print_results(): if len(filtered_messages) == 0: messages_concat = 'OK' else: - messages_concat = '; '.join(filtered_messages) + messages_concat = '\n'.join(filtered_messages) else: - messages_concat = '; '.join(messages) + messages_concat = '\n'.join(messages) if no_performance or len(performance_data) == 0: print(messages_concat) diff --git a/tests/test_check_docker.py b/tests/test_check_docker.py index a990164..f7058d1 100644 --- a/tests/test_check_docker.py +++ b/tests/test_check_docker.py @@ -703,8 +703,8 @@ def test_perform(check_docker, fs, args, called): @pytest.mark.parametrize("messages, perf_data, expected", ( (['TEST'], [], 'TEST'), - (['FOO', 'BAR'], [], 'FOO; BAR'), - (['FOO', 'BAR'], ['1;2;3;4;'], 'FOO; BAR|1;2;3;4;') + (['FOO', 'BAR'], [], 'FOO\nBAR'), + (['FOO', 'BAR'], ['1;2;3;4;'], 'FOO\nBAR|1;2;3;4;') )) def test_print_results(check_docker, capsys, messages, perf_data, expected): # These sometimes get set to true when using random-order plugin, for example --random-order-seed=620808 @@ -720,8 +720,8 @@ def test_print_results(check_docker, capsys, messages, perf_data, expected): @pytest.mark.parametrize("messages, perf_data, no_ok, no_performance, 
expected", ( ([], [], False, False, ''), (['TEST'], [], False, False, 'TEST'), - (['FOO', 'BAR'], [], False, False, 'FOO; BAR'), - (['FOO', 'BAR'], ['1;2;3;4;'], False, False, 'FOO; BAR|1;2;3;4;'), + (['FOO', 'BAR'], [], False, False, 'FOO\nBAR'), + (['FOO', 'BAR'], ['1;2;3;4;'], False, False, 'FOO\nBAR|1;2;3;4;'), ([], [], True, False, 'OK'), (['OK: TEST'], [], True, False, 'OK'), (['OK: FOO', 'OK: BAR'], [], True, False, 'OK'), From e7bcd72ffc1ab86da28243ff53b449746b928d2a Mon Sep 17 00:00:00 2001 From: Markus Opolka Date: Mon, 26 Jan 2026 11:44:11 +0100 Subject: [PATCH 08/11] Handle URL and JSON errors when parsing daemon response --- check_docker/check_docker.py | 45 ++++++++++++++++++++++++++---------- check_docker/check_swarm.py | 39 +++++++++++++++++++++++++------ tests/test_check_docker.py | 5 ++-- 3 files changed, 68 insertions(+), 21 deletions(-) diff --git a/check_docker/check_docker.py b/check_docker/check_docker.py index 516c3e7..025b6f9 100755 --- a/check_docker/check_docker.py +++ b/check_docker/check_docker.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# logging.basicConfig(level=logging.DEBUG) + import argparse import json import logging @@ -8,6 +8,7 @@ import re import socket import stat +import sys import traceback from collections import deque, namedtuple, UserDict, defaultdict from concurrent import futures @@ -276,16 +277,31 @@ def evaluate_numeric_thresholds(container, value, thresholds, name, short_name, @lru_cache(maxsize=None) def get_url(url): logger.debug("get_url: {}".format(url)) - response = better_urllib_get.open(url, timeout=timeout) - logger.debug("get_url: {} {}".format(url, response.status)) - return process_urllib_response(response), response.status + try: + response = better_urllib_get.open(url, timeout=timeout) + logger.debug("get_url: {} {}".format(url, response.status)) + return process_urllib_response(response), response.status + except URLError as e: + unknown(f'Failed to connect to daemon: {e.reason}.') + # We have no 
result, so we can just exit + print_results() + sys.exit(rc) def process_urllib_response(response): response_bytes = response.read() body = response_bytes.decode('utf-8') - # logger.debug("BODY: {}".format(body)) - return json.loads(body) + logger.debug(body) + + resp = {} + try: + resp = json.loads(body) + except json.JSONDecodeError as e: + unknown(f'Unable to parse response.') + print_results() + sys.exit(rc) + + return resp def get_container_info(name): @@ -304,6 +320,7 @@ def get_state(container): def get_stats(container): content, _ = get_url(daemon + '/containers/{container}/stats?stream=0'.format(container=container)) + print(content) return content @@ -849,6 +866,12 @@ def process_args(args): action='store_true', help='Suppress performance data. Reduces output when performance data is not being used.') + # Debug logging + parser.add_argument('--debug', + dest='debug', + action='store_true', + help='Enable debug logging.') + parser.add_argument('-V', action='version', version='%(prog)s {}'.format(__version__)) if len(args) == 0: @@ -856,6 +879,9 @@ def process_args(args): parsed_args = parser.parse_args(args=args) + if parsed_args.debug: + logging.basicConfig(level=logging.DEBUG) + global timeout timeout = parsed_args.timeout @@ -941,12 +967,7 @@ def perform_checks(raw_args): return # Here is where all the work happens - try: - containers = get_containers(args.containers, args.present) - except URLError as e: - critical(f'Failed to connect to daemon: {e.reason}.') - print_results() - exit(rc) + containers = get_containers(args.containers, args.present) if len(containers) == 0 and not args.present: unknown("No containers names found matching criteria") diff --git a/check_docker/check_swarm.py b/check_docker/check_swarm.py index 8db694b..2cdcfd3 100755 --- a/check_docker/check_swarm.py +++ b/check_docker/check_swarm.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 + import argparse import json import logging @@ -6,10 +7,12 @@ import re import socket import stat 
+import sys import traceback from functools import lru_cache from http.client import HTTPConnection from sys import argv +from urllib.error import URLError from urllib.request import AbstractHTTPHandler, HTTPHandler, HTTPSHandler, OpenerDirector logger = logging.getLogger() @@ -78,21 +81,34 @@ def socket_open(self, req): better_urllib_get.add_handler(SocketFileHandler()) -# Util functions -############################################################################################# - - @lru_cache() def get_url(url): - response = better_urllib_get.open(url, timeout=timeout) - return process_urllib_response(response), response.status + logger.debug("get_url: {}".format(url)) + try: + response = better_urllib_get.open(url, timeout=timeout) + logger.debug("get_url: {} {}".format(url, response.status)) + return process_urllib_response(response), response.status + except URLError as e: + unknown(f'Failed to connect to daemon: {e.reason}.') + # We have no result, so we can just exit + print_results() + sys.exit(rc) def process_urllib_response(response): response_bytes = response.read() body = response_bytes.decode('utf-8') logger.debug(body) - return json.loads(body) + + resp = {} + try: + resp = json.loads(body) + except json.JSONDecodeError as e: + unknown(f'Unable to parse response.') + print_results() + sys.exit(rc) + + return resp def get_swarm_status(): @@ -289,6 +305,12 @@ def process_args(args): action='store_true', help="Don't require global services to be running on paused nodes") + # Debug logging + parser.add_argument('--debug', + dest='debug', + action='store_true', + help='Enable debug logging.') + parser.add_argument('-V', action='version', version='%(prog)s {}'.format(__version__)) if len(args) == 0: @@ -296,6 +318,9 @@ def process_args(args): parsed_args = parser.parse_args(args=args) + if parsed_args.debug: + logging.basicConfig(level=logging.DEBUG) + global timeout timeout = parsed_args.timeout diff --git a/tests/test_check_docker.py 
b/tests/test_check_docker.py index f7058d1..101ee86 100644 --- a/tests/test_check_docker.py +++ b/tests/test_check_docker.py @@ -108,14 +108,15 @@ def mock_open(*args, **kwargs): with patch('check_docker.check_docker.HTTPSHandler.https_open', side_effect=mock_open), \ patch('check_docker.check_docker.Oauth2TokenAuthHandler._get_oauth2_token', return_value='test_token') as get_token: - with pytest.raises(HTTPError): + with pytest.raises(SystemExit): check_docker.get_url(url='https://example.com/test') def test_get_url_500(check_docker): expected_exception = HTTPError(code=500, fp=None, url='url', msg='msg', hdrs=[]) with patch('check_docker.check_docker.HTTPSHandler.https_open', side_effect=expected_exception), \ - pytest.raises(HTTPError): + pytest.raises(SystemExit): + check_docker.get_url(url='https://example.com/test') From d95b3bce33fb2f9c4efcfae7fdd5c44704d80196 Mon Sep 17 00:00:00 2001 From: Markus Opolka Date: Mon, 26 Jan 2026 15:19:23 +0100 Subject: [PATCH 09/11] Change swarm service list output to be newline separated - Also removes erroneous debug print --- check_docker/check_docker.py | 1 - check_docker/check_swarm.py | 3 +-- tests/test_check_swarm.py | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/check_docker/check_docker.py b/check_docker/check_docker.py index 025b6f9..d6456ea 100755 --- a/check_docker/check_docker.py +++ b/check_docker/check_docker.py @@ -320,7 +320,6 @@ def get_state(container): def get_stats(container): content, _ = get_url(daemon + '/containers/{container}/stats?stream=0'.format(container=container)) - print(content) return content diff --git a/check_docker/check_swarm.py b/check_docker/check_swarm.py index 2cdcfd3..340adfd 100755 --- a/check_docker/check_swarm.py +++ b/check_docker/check_swarm.py @@ -351,7 +351,7 @@ def socketfile_permissions_failure(parsed_args): def print_results(): - print('; '.join(messages)) + print('\n'.join(messages)) def perform_checks(raw_args): @@ -360,7 +360,6 @@ def 
perform_checks(raw_args): unknown("Cannot access docker socket file. User ID={}, socket file={}".format(os.getuid(), args.connection)) else: # Here is where all the work happens - ############################################################################################# try: if args.swarm: check_swarm() diff --git a/tests/test_check_swarm.py b/tests/test_check_swarm.py index d9f39ac..da630ef 100644 --- a/tests/test_check_swarm.py +++ b/tests/test_check_swarm.py @@ -375,7 +375,7 @@ def test_check_not_swarm_service(check_swarm, fs): @pytest.mark.parametrize("messages, perf_data, expected", ( ([], [], ''), (['TEST'], [], 'TEST'), - (['FOO', 'BAR'], [], 'FOO; BAR'), + (['FOO', 'BAR'], [], 'FOO\nBAR'), )) def test_print_results(check_swarm, capsys, messages, perf_data, expected): check_swarm.messages = messages From 165d5ca3675c2fa573691083328bb3adab230100 Mon Sep 17 00:00:00 2001 From: Markus Opolka Date: Mon, 26 Jan 2026 15:33:23 +0100 Subject: [PATCH 10/11] Fix no_ok arg setting the no_performance arg I think this was probably a copy paste error. Fixing it will change the default behavior so that performance data is show. The original can be restored by simply using the --no-performance flag --- check_docker/check_docker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/check_docker/check_docker.py b/check_docker/check_docker.py index d6456ea..e0b5781 100755 --- a/check_docker/check_docker.py +++ b/check_docker/check_docker.py @@ -951,7 +951,7 @@ def perform_checks(raw_args): no_ok = args.no_ok global no_performance - no_performance = args.no_ok + no_performance = args.no_performance if socketfile_permissions_failure(args): unknown("Cannot access docker socket file. 
User ID={}, socket file={}".format(os.getuid(), args.connection)) From d567378d020cb1ca400f8d8e8f7b06a61d83b81b Mon Sep 17 00:00:00 2001 From: Markus Opolka Date: Mon, 26 Jan 2026 16:13:31 +0100 Subject: [PATCH 11/11] Added support for manifest lists Co-authored-by: mlu --- check_docker/check_docker.py | 41 +++++++++++++++++++++++++----------- tests/test_check_docker.py | 29 +++++++++++++++++-------- 2 files changed, 49 insertions(+), 21 deletions(-) diff --git a/check_docker/check_docker.py b/check_docker/check_docker.py index e0b5781..7e5069e 100755 --- a/check_docker/check_docker.py +++ b/check_docker/check_docker.py @@ -374,26 +374,39 @@ def normalize_image_name_to_manifest_url(image_name, insecure_registries): # Registry query url scheme = 'http' if parsed_url.registry.lower() in lower_insecure else 'https' - url = '{scheme}://{registry}/v2/{image_name}/manifests/{image_tag}'.format(scheme=scheme, + url = '{scheme}://{registry}/v2/{image_name}/manifests'.format(scheme=scheme, registry=parsed_url.registry, - image_name=parsed_url.name, - image_tag=parsed_url.tag) - return url, parsed_url.registry + image_name=parsed_url.name) + image_tag = parsed_url.tag + + return url, image_tag, parsed_url.registry # Auth servers seem picky about being hit too hard. Can't figure out why. 
;) # As result it is best to single thread this check # This is based on https://docs.docker.com/registry/spec/auth/token/#requesting-a-token -def get_digest_from_registry(url): +def get_digest_from_registry(url, image_tag, image_arch): logger.debug("get_digest_from_registry") # query registry # TODO: Handle logging in if needed - registry_info, status_code = get_url(url=url) + image_url = '{}/{}'.format(url, image_tag) + registry_info, status_code = get_url(url=image_url) + + if 'manifests' in registry_info: + digest = find_digest_for_architecture(registry_info['manifests'], image_arch) + image_url = '{}/{}'.format(url, digest) + registry_info, status_code = get_url(url=image_url) if status_code != 200: raise RegistryError(response=registry_info) + return registry_info['config'].get('digest', None) +def find_digest_for_architecture(manifests, image_arch): + for manifest in manifests: + if 'platform' in manifest and manifest['platform']['architecture'] == image_arch: + return manifest.get('digest') + return None def set_rc(new_rc): global rc @@ -641,10 +654,14 @@ def check_version(container, insecure_registries): unknown('"{}" has last no repository tag. 
Is this anywhere else?'.format(container)) return - url, registry = normalize_image_name_to_manifest_url(image_urls[0], insecure_registries) - logger.debug("Looking up image digest here {}".format(url)) + + container_image = get_container_info(container)['Image'] + image_arch = get_image_info(container_image)['Architecture'] + + url, image_tag, registry = normalize_image_name_to_manifest_url(image_urls[0], insecure_registries) + logger.debug("Looking up image digest here {}/{}".format(url, image_tag)) try: - registry_hash = get_digest_from_registry(url) + registry_hash = get_digest_from_registry(url, image_tag, image_arch) except URLError as e: if hasattr(e.reason, 'reason') and e.reason.reason == 'UNKNOWN_PROTOCOL': unknown( @@ -653,12 +670,12 @@ def check_version(container, insecure_registries): return elif hasattr(e.reason, 'strerror') and e.reason.strerror == 'nodename nor servname provided, or not known': unknown( - "Cannot reach registry for {} at {}".format(container, url)) + "Cannot reach registry for {} at {}/{}".format(container, url, image_tag)) return else: raise e except RegistryError as e: - unknown("Cannot check version, couldn't retrieve digest for {} while checking {}.".format(container, url)) + unknown("Cannot check version, couldn't retrieve digest for {} while checking {}/{}.".format(container, url, image_tag)) return logger.debug("Image digests, local={} remote={}".format(image_id, registry_hash)) if registry_hash == image_id: @@ -789,7 +806,7 @@ def process_args(args): action='store', type=str, metavar='WARN:CRIT', - help='Check cpu usage percentage taking into account any limits.') + help='Check cpu usage percentage taking into account any limits. 
Valid values are 0 - 100.') # Memory parser.add_argument('--memory', diff --git a/tests/test_check_docker.py b/tests/test_check_docker.py index 101ee86..1225bde 100644 --- a/tests/test_check_docker.py +++ b/tests/test_check_docker.py @@ -846,13 +846,14 @@ def test_get_container_image_urls(check_docker): assert urls == ['test'] -@pytest.mark.parametrize('image_url, expected_normal_url', ( - ('foo', 'https://' + cd.DEFAULT_PUBLIC_REGISTRY + '/v2/library/foo/manifests/latest'), - ('insecure.com/foo', 'http://insecure.com/v2/foo/manifests/latest'), +@pytest.mark.parametrize('image_url, expected_normal_url, expected_tag', ( + ('foo', 'https://' + cd.DEFAULT_PUBLIC_REGISTRY + '/v2/library/foo/manifests', 'latest'), + ('insecure.com/foo', 'http://insecure.com/v2/foo/manifests', 'latest'), )) -def test_normalize_image_name_to_manifest_url(check_docker, image_url, expected_normal_url): +def test_normalize_image_name_to_manifest_url(check_docker, image_url, expected_normal_url, expected_tag): insecure_registries = ('insecure.com',) - normal_url, _ = check_docker.normalize_image_name_to_manifest_url(image_url, insecure_registries) + normal_url, tag, _ = check_docker.normalize_image_name_to_manifest_url(image_url, insecure_registries) + assert tag == expected_tag assert normal_url == expected_normal_url @@ -866,17 +867,17 @@ def test_get_container_image_id(check_docker): def test_get_digest_from_registry_no_auth(check_docker): fake_data = {'config': {'digest': 'test_token'}} with patch('check_docker.check_docker.get_url', return_value=(fake_data, 200)): - digest = check_docker.get_digest_from_registry('https://example.com/v2/test/manifests/lastest') + digest = check_docker.get_digest_from_registry('https://example.com/v2/test/manifests/', 'latest', 'x86') assert digest == "test_token" def test_get_digest_from_registry_missing_digest(check_docker): with patch('check_docker.check_docker.get_url', return_value=({},404)): with pytest.raises(check_docker.RegistryError): - 
check_docker.get_digest_from_registry('https://example.com/v2/test/manifests/lastest') + check_docker.get_digest_from_registry('https://example.com/v2/test/manifests/', 'lastest', 'arm') -@pytest.mark.parametrize('local_container_container_image_id,registry_container_digest, image_urls, expected_rc', ( +@pytest.mark.parametrize('local_container_container_image_id, registry_container_digest, image_urls, expected_rc', ( ('AAAA', 'AAAA', ('example.com/foo',), cd.OK_RC), ('AAAA', 'BBBB', ('example.com/foo',), cd.CRITICAL_RC), (None, '', ('example.com/foo',), cd.UNKNOWN_RC), @@ -887,6 +888,8 @@ def test_check_version(check_docker, local_container_container_image_id, registr expected_rc): with patch('check_docker.check_docker.get_container_image_id', return_value=local_container_container_image_id), \ patch('check_docker.check_docker.get_container_image_urls', return_value=image_urls), \ + patch('check_docker.check_docker.get_container_info', return_value={'Image': 'sha256:867'}), \ + patch('check_docker.check_docker.get_image_info', return_value={'Architecture': 'arm64'}), \ patch('check_docker.check_docker.get_digest_from_registry', return_value=registry_container_digest): check_docker.check_version('container', tuple()) assert check_docker.rc == expected_rc @@ -895,7 +898,9 @@ def test_check_version(check_docker, local_container_container_image_id, registr def test_check_version_missing_digest(check_docker): with patch('check_docker.check_docker.get_container_image_id', return_value='AAA'), \ patch('check_docker.check_docker.get_container_image_urls', return_value=('example.com/foo',)), \ - patch('check_docker.check_docker.get_digest_from_registry', + patch('check_docker.check_docker.get_container_info', return_value={'Image': 'sha256:867'}), \ + patch('check_docker.check_docker.get_image_info', return_value={'Architecture': 'arm64'}), \ + patch('check_docker.check_docker.get_digest_from_registry', return_value=('', '', ''), 
side_effect=check_docker.RegistryError(response=None)): check_docker.check_version('container', tuple()) assert check_docker.rc == cd.UNKNOWN_RC @@ -908,6 +913,8 @@ class Reason(): exception = URLError(reason=Reason) with patch('check_docker.check_docker.get_container_image_id', return_value='AAA'), \ patch('check_docker.check_docker.get_container_image_urls', return_value=('example.com/foo',)), \ + patch('check_docker.check_docker.get_container_info', return_value={'Image': 'sha256:867'}), \ + patch('check_docker.check_docker.get_image_info', return_value={'Architecture': 'arm64'}), \ patch('check_docker.check_docker.get_digest_from_registry', side_effect=exception): check_docker.check_version('container', tuple()) assert check_docker.rc == cd.UNKNOWN_RC @@ -921,6 +928,8 @@ class Reason(): exception = URLError(reason=Reason) with patch('check_docker.check_docker.get_container_image_id', return_value='AAA'), \ patch('check_docker.check_docker.get_container_image_urls', return_value=('example.com/foo',)), \ + patch('check_docker.check_docker.get_container_info', return_value={'Image': 'sha256:867'}), \ + patch('check_docker.check_docker.get_image_info', return_value={'Architecture': 'arm64'}), \ patch('check_docker.check_docker.get_digest_from_registry', side_effect=exception): check_docker.check_version('container', tuple()) assert check_docker.rc == cd.UNKNOWN_RC @@ -932,6 +941,8 @@ def test_check_version_exception(check_docker): exception = URLError(reason=None) with patch('check_docker.check_docker.get_container_image_id', return_value='AAA'), \ patch('check_docker.check_docker.get_container_image_urls', return_value=('example.com/foo',)), \ + patch('check_docker.check_docker.get_container_info', return_value={'Image': 'sha256:867'}), \ + patch('check_docker.check_docker.get_image_info', return_value={'Architecture': 'arm64'}), \ patch('check_docker.check_docker.get_digest_from_registry', side_effect=exception), \ pytest.raises(URLError): 
check_docker.check_version('container', tuple())