forked from CCCHH/ansible-infra
Vendor Galaxy Roles and Collections
This commit is contained in:
parent c1e1897cda
commit 2aed20393f
3553 changed files with 387444 additions and 2 deletions
@@ -0,0 +1,85 @@
# Copyright (c) 2020 Red Hat, Inc.
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import annotations

import typing as t
import unittest
from io import StringIO
from unittest import mock

from ansible.errors import AnsibleError
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader


class TestDockerConnectionClass(unittest.TestCase):
    def setUp(self) -> None:
        self.play_context = PlayContext()
        self.play_context.prompt = (
            "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: "
        )
        self.in_stream = StringIO()
        with mock.patch(
            "ansible_collections.community.docker.plugins.connection.docker.get_bin_path",
            return_value="docker",
        ):
            self.dc = connection_loader.get(
                "community.docker.docker", self.play_context, self.in_stream
            )

    def tearDown(self) -> None:
        pass

    @mock.patch(
        "ansible_collections.community.docker.plugins.connection.docker.Connection._old_docker_version",
        return_value=("false", "garbage", "", 1),
    )
    @mock.patch(
        "ansible_collections.community.docker.plugins.connection.docker.Connection._new_docker_version",
        return_value=("docker version", "1.2.3", "", 0),
    )
    def test_docker_connection_module_too_old(
        self, mock_new_docker_version: t.Any, mock_old_docker_version: t.Any
    ) -> None:
        self.dc._version = None
        self.dc.remote_user = "foo"
        self.assertRaisesRegex(
            AnsibleError,
            "^docker connection type requires docker 1.3 or higher$",
            self.dc._get_actual_user,
        )

    @mock.patch(
        "ansible_collections.community.docker.plugins.connection.docker.Connection._old_docker_version",
        return_value=("false", "garbage", "", 1),
    )
    @mock.patch(
        "ansible_collections.community.docker.plugins.connection.docker.Connection._new_docker_version",
        return_value=("docker version", "1.7.0", "", 0),
    )
    def test_docker_connection_module(
        self, mock_new_docker_version: t.Any, mock_old_docker_version: t.Any
    ) -> None:
        self.dc._version = None

    # old version and new version fail
    @mock.patch(
        "ansible_collections.community.docker.plugins.connection.docker.Connection._old_docker_version",
        return_value=("false", "garbage", "", 1),
    )
    @mock.patch(
        "ansible_collections.community.docker.plugins.connection.docker.Connection._new_docker_version",
        return_value=("false", "garbage", "", 1),
    )
    def test_docker_connection_module_wrong_cmd(
        self, mock_new_docker_version: t.Any, mock_old_docker_version: t.Any
    ) -> None:
        self.dc._version = None
        self.dc.remote_user = "foo"
        self.assertRaisesRegex(
            AnsibleError,
            "^Docker version check (.*?) failed:",
            self.dc._get_actual_user,
        )
@@ -0,0 +1,333 @@
# Copyright (c), Felix Fontein <felix@fontein.de>, 2020
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import annotations

import typing as t
from unittest.mock import create_autospec

import pytest
from ansible.inventory.data import InventoryData
from ansible.parsing.dataloader import DataLoader
from ansible.template import Templar
from ansible_collections.community.internal_test_tools.tests.unit.utils.trust import (
    make_trusted,
)

from ansible_collections.community.docker.plugins.inventory.docker_containers import (
    InventoryModule,
)

if t.TYPE_CHECKING:
    from collections.abc import Callable


@pytest.fixture(scope="module", name="templar")
def templar_fixture() -> Templar:
    dataloader = create_autospec(DataLoader, instance=True)
    return Templar(loader=dataloader)


@pytest.fixture(scope="module", name="inventory")
def inventory_fixture(templar: Templar) -> InventoryModule:
    r = InventoryModule()
    r.inventory = InventoryData()
    r.templar = templar
    return r


LOVING_THARP = {
    "Id": "7bd547963679e3209cafd52aff21840b755c96fd37abcd7a6e19da8da6a7f49a",
    "Name": "/loving_tharp",
    "Image": "sha256:349f492ff18add678364a62a67ce9a13487f14293ae0af1baf02398aa432f385",
    "State": {
        "Running": True,
    },
    "Config": {
        "Image": "quay.io/ansible/ubuntu1804-test-container:1.21.0",
    },
}


LOVING_THARP_STACK = {
    "Id": "7bd547963679e3209cafd52aff21840b755c96fd37abcd7a6e19da8da6a7f49a",
    "Name": "/loving_tharp",
    "Image": "sha256:349f492ff18add678364a62a67ce9a13487f14293ae0af1baf02398aa432f385",
    "State": {
        "Running": True,
    },
    "Config": {
        "Image": "quay.io/ansible/ubuntu1804-test-container:1.21.0",
        "Labels": {
            "com.docker.stack.namespace": "my_stack",
        },
    },
    "NetworkSettings": {
        "Ports": {
            "22/tcp": [{"HostIp": "0.0.0.0", "HostPort": "32802"}],
        },
    },
}


LOVING_THARP_SERVICE = {
    "Id": "7bd547963679e3209cafd52aff21840b755c96fd37abcd7a6e19da8da6a7f49a",
    "Name": "/loving_tharp",
    "Image": "sha256:349f492ff18add678364a62a67ce9a13487f14293ae0af1baf02398aa432f385",
    "State": {
        "Running": True,
    },
    "Config": {
        "Image": "quay.io/ansible/ubuntu1804-test-container:1.21.0",
        "Labels": {
            "com.docker.swarm.service.name": "my_service",
        },
    },
}


def create_get_option(
    options: dict[str, t.Any], default: t.Any = False
) -> Callable[[str], t.Any]:
    def get_option(option: str) -> t.Any:
        if option in options:
            return options[option]
        return default

    return get_option


class FakeClient:
    def __init__(self, *hosts: dict[str, t.Any]) -> None:
        self.get_results: dict[str, t.Any] = {}
        list_reply: list[dict[str, t.Any]] = []
        for host in hosts:
            list_reply.append(
                {
                    "Id": host["Id"],
                    "Names": [host["Name"]] if host["Name"] else [],
                    "Image": host["Config"]["Image"],
                    "ImageId": host["Image"],
                }
            )
            self.get_results[f"/containers/{host['Name']}/json"] = host
            self.get_results[f"/containers/{host['Id']}/json"] = host
        self.get_results["/containers/json"] = list_reply

    def get_json(self, url: str, *param: str, **kwargs: t.Any) -> t.Any:
        url = url.format(*param)
        return self.get_results[url]


def test_populate(inventory: InventoryModule, mocker: t.Any) -> None:
    assert inventory.inventory is not None
    client = FakeClient(LOVING_THARP)

    inventory.get_option = mocker.MagicMock(  # type: ignore[method-assign]
        side_effect=create_get_option(
            {
                "verbose_output": True,
                "connection_type": "docker-api",
                "add_legacy_groups": False,
                "compose": {},
                "groups": {},
                "keyed_groups": {},
                "filters": None,
            }
        )
    )
    inventory._populate(client)  # type: ignore

    host_1 = inventory.inventory.get_host("loving_tharp")
    assert host_1 is not None
    host_1_vars = host_1.get_vars()

    assert host_1_vars["ansible_host"] == "loving_tharp"
    assert host_1_vars["ansible_connection"] == "community.docker.docker_api"
    assert "ansible_ssh_host" not in host_1_vars
    assert "ansible_ssh_port" not in host_1_vars
    assert "docker_state" in host_1_vars
    assert "docker_config" in host_1_vars
    assert "docker_image" in host_1_vars

    assert len(inventory.inventory.groups["ungrouped"].hosts) == 0
    assert len(inventory.inventory.groups["all"].hosts) == 0
    assert len(inventory.inventory.groups) == 2
    assert len(inventory.inventory.hosts) == 1


def test_populate_service(inventory: InventoryModule, mocker: t.Any) -> None:
    assert inventory.inventory is not None
    client = FakeClient(LOVING_THARP_SERVICE)

    inventory.get_option = mocker.MagicMock(  # type: ignore[method-assign]
        side_effect=create_get_option(
            {
                "verbose_output": False,
                "connection_type": "docker-cli",
                "add_legacy_groups": True,
                "compose": {},
                "groups": {},
                "keyed_groups": {},
                "docker_host": "unix://var/run/docker.sock",
                "filters": None,
            }
        )
    )
    inventory._populate(client)  # type: ignore

    host_1 = inventory.inventory.get_host("loving_tharp")
    assert host_1 is not None
    host_1_vars = host_1.get_vars()

    assert host_1_vars["ansible_host"] == "loving_tharp"
    assert host_1_vars["ansible_connection"] == "community.docker.docker"
    assert "ansible_ssh_host" not in host_1_vars
    assert "ansible_ssh_port" not in host_1_vars
    assert "docker_state" not in host_1_vars
    assert "docker_config" not in host_1_vars
    assert "docker_image" not in host_1_vars

    assert len(inventory.inventory.groups["ungrouped"].hosts) == 0
    assert len(inventory.inventory.groups["all"].hosts) == 0
    assert len(inventory.inventory.groups["7bd547963679e"].hosts) == 1
    assert (
        len(
            inventory.inventory.groups[
                "7bd547963679e3209cafd52aff21840b755c96fd37abcd7a6e19da8da6a7f49a"
            ].hosts
        )
        == 1
    )
    assert (
        len(
            inventory.inventory.groups[
                "image_quay.io/ansible/ubuntu1804-test-container:1.21.0"
            ].hosts
        )
        == 1
    )
    assert len(inventory.inventory.groups["loving_tharp"].hosts) == 1
    assert len(inventory.inventory.groups["running"].hosts) == 1
    assert len(inventory.inventory.groups["stopped"].hosts) == 0
    assert len(inventory.inventory.groups["service_my_service"].hosts) == 1
    assert len(inventory.inventory.groups["unix://var/run/docker.sock"].hosts) == 1
    assert len(inventory.inventory.groups) == 10
    assert len(inventory.inventory.hosts) == 1


def test_populate_stack(inventory: InventoryModule, mocker: t.Any) -> None:
    assert inventory.inventory is not None
    client = FakeClient(LOVING_THARP_STACK)

    inventory.get_option = mocker.MagicMock(  # type: ignore[method-assign]
        side_effect=create_get_option(
            {
                "verbose_output": False,
                "connection_type": "ssh",
                "add_legacy_groups": True,
                "compose": {},
                "groups": {},
                "keyed_groups": {},
                "docker_host": "unix://var/run/docker.sock",
                "default_ip": "127.0.0.1",
                "private_ssh_port": 22,
                "filters": None,
            }
        )
    )
    inventory._populate(client)  # type: ignore

    host_1 = inventory.inventory.get_host("loving_tharp")
    assert host_1 is not None
    host_1_vars = host_1.get_vars()

    assert host_1_vars["ansible_ssh_host"] == "127.0.0.1"
    assert host_1_vars["ansible_ssh_port"] == "32802"
    assert "ansible_host" not in host_1_vars
    assert "ansible_connection" not in host_1_vars
    assert "docker_state" not in host_1_vars
    assert "docker_config" not in host_1_vars
    assert "docker_image" not in host_1_vars

    assert len(inventory.inventory.groups["ungrouped"].hosts) == 0
    assert len(inventory.inventory.groups["all"].hosts) == 0
    assert len(inventory.inventory.groups["7bd547963679e"].hosts) == 1
    assert (
        len(
            inventory.inventory.groups[
                "7bd547963679e3209cafd52aff21840b755c96fd37abcd7a6e19da8da6a7f49a"
            ].hosts
        )
        == 1
    )
    assert (
        len(
            inventory.inventory.groups[
                "image_quay.io/ansible/ubuntu1804-test-container:1.21.0"
            ].hosts
        )
        == 1
    )
    assert len(inventory.inventory.groups["loving_tharp"].hosts) == 1
    assert len(inventory.inventory.groups["running"].hosts) == 1
    assert len(inventory.inventory.groups["stopped"].hosts) == 0
    assert len(inventory.inventory.groups["stack_my_stack"].hosts) == 1
    assert len(inventory.inventory.groups["unix://var/run/docker.sock"].hosts) == 1
    assert len(inventory.inventory.groups) == 10
    assert len(inventory.inventory.hosts) == 1


def test_populate_filter_none(inventory: InventoryModule, mocker: t.Any) -> None:
    assert inventory.inventory is not None
    client = FakeClient(LOVING_THARP)

    inventory.get_option = mocker.MagicMock(  # type: ignore[method-assign]
        side_effect=create_get_option(
            {
                "verbose_output": True,
                "connection_type": "docker-api",
                "add_legacy_groups": False,
                "compose": {},
                "groups": {},
                "keyed_groups": {},
                "filters": [
                    {"exclude": True},
                ],
            }
        )
    )
    inventory._populate(client)  # type: ignore

    assert len(inventory.inventory.hosts) == 0


def test_populate_filter(inventory: InventoryModule, mocker: t.Any) -> None:
    assert inventory.inventory is not None
    client = FakeClient(LOVING_THARP)

    inventory.get_option = mocker.MagicMock(  # type: ignore[method-assign]
        side_effect=create_get_option(
            {
                "verbose_output": True,
                "connection_type": "docker-api",
                "add_legacy_groups": False,
                "compose": {},
                "groups": {},
                "keyed_groups": {},
                "filters": [
                    {"include": make_trusted("docker_state.Running is true")},
                    {"exclude": True},
                ],
            }
        )
    )
    inventory._populate(client)  # type: ignore

    host_1 = inventory.inventory.get_host("loving_tharp")
    assert host_1 is not None
    host_1_vars = host_1.get_vars()

    assert host_1_vars["ansible_host"] == "loving_tharp"
    assert len(inventory.inventory.hosts) == 1
@@ -0,0 +1,674 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import datetime
import io
import json
import os
import re
import shutil
import socket
import struct
import tempfile
import threading
import time
import typing as t
import unittest
from http.server import BaseHTTPRequestHandler
from socketserver import ThreadingTCPServer
from unittest import mock

import pytest
import requests
from requests.packages import urllib3

from ansible_collections.community.docker.plugins.module_utils._api import (
    constants,
    errors,
)
from ansible_collections.community.docker.plugins.module_utils._api.api.client import (
    APIClient,
)
from ansible_collections.community.docker.tests.unit.plugins.module_utils._api.constants import (
    DEFAULT_DOCKER_API_VERSION,
)

from .. import fake_api

if t.TYPE_CHECKING:
    from ansible_collections.community.docker.plugins.module_utils._api.auth import (
        AuthConfig,
    )


DEFAULT_TIMEOUT_SECONDS = constants.DEFAULT_TIMEOUT_SECONDS


def create_response(
    status_code: int = 200,
    content: bytes | dict[str, t.Any] | list[dict[str, t.Any]] = b"",
    headers: dict[str, str] | None = None,
    reason: str = "",
    elapsed: int = 0,
    request: requests.PreparedRequest | None = None,
    raw: urllib3.HTTPResponse | None = None,
) -> requests.Response:
    res = requests.Response()
    res.status_code = status_code
    if not isinstance(content, bytes):
        content = json.dumps(content).encode("ascii")
    res._content = content
    res.headers = requests.structures.CaseInsensitiveDict(headers or {})
    res.reason = reason
    res.elapsed = datetime.timedelta(elapsed)
    res.request = request  # type: ignore
    res.raw = raw
    return res


def fake_resolve_authconfig(  # pylint: disable=keyword-arg-before-vararg
    authconfig: AuthConfig, *args: t.Any, registry: str | None = None, **kwargs: t.Any
) -> None:
    return None


def fake_inspect_container(self: object, container: str, tty: bool = False) -> t.Any:
    return fake_api.get_fake_inspect_container(tty=tty)[1]


def fake_resp(
    method: str, url: str, *args: t.Any, **kwargs: t.Any
) -> requests.Response:
    key: str | tuple[str, str] | None = None
    if url in fake_api.fake_responses:
        key = url
    elif (url, method) in fake_api.fake_responses:
        key = (url, method)
    if not key:
        raise NotImplementedError(f"{method} {url}")
    status_code, content = fake_api.fake_responses[key]()
    return create_response(status_code=status_code, content=content)


fake_request = mock.Mock(side_effect=fake_resp)


def fake_get(
    self: APIClient, url: str, *args: str, **kwargs: t.Any
) -> requests.Response:
    return fake_request("GET", url, *args, **kwargs)


def fake_post(
    self: APIClient, url: str, *args: str, **kwargs: t.Any
) -> requests.Response:
    return fake_request("POST", url, *args, **kwargs)


def fake_put(
    self: APIClient, url: str, *args: str, **kwargs: t.Any
) -> requests.Response:
    return fake_request("PUT", url, *args, **kwargs)


def fake_delete(
    self: APIClient, url: str, *args: str, **kwargs: t.Any
) -> requests.Response:
    return fake_request("DELETE", url, *args, **kwargs)


def fake_read_from_socket(
    self: APIClient,
    response: requests.Response,
    stream: bool,
    tty: bool = False,
    demux: bool = False,
) -> bytes:
    return b""


url_base = f"{fake_api.prefix}/"  # pylint: disable=invalid-name
url_prefix = f"{url_base}v{DEFAULT_DOCKER_API_VERSION}/"  # pylint: disable=invalid-name


class BaseAPIClientTest(unittest.TestCase):
    def setUp(self) -> None:
        self.patcher = mock.patch.multiple(
            "ansible_collections.community.docker.plugins.module_utils._api.api.client.APIClient",
            get=fake_get,
            post=fake_post,
            put=fake_put,
            delete=fake_delete,
            _read_from_socket=fake_read_from_socket,
        )
        self.patcher.start()
        self.client = APIClient(version=DEFAULT_DOCKER_API_VERSION)

    def tearDown(self) -> None:
        self.client.close()
        self.patcher.stop()

    def base_create_payload(
        self, img: str = "busybox", cmd: list[str] | None = None
    ) -> dict[str, t.Any]:
        if not cmd:
            cmd = ["true"]
        return {
            "Tty": False,
            "Image": img,
            "Cmd": cmd,
            "AttachStdin": False,
            "AttachStderr": True,
            "AttachStdout": True,
            "StdinOnce": False,
            "OpenStdin": False,
            "NetworkDisabled": False,
        }


class DockerApiTest(BaseAPIClientTest):
    def test_ctor(self) -> None:
        with pytest.raises(errors.DockerException) as excinfo:
            APIClient(version=1.12)  # type: ignore

        assert (
            str(excinfo.value)
            == "Version parameter must be a string or None. Found float"
        )

    def test_url_valid_resource(self) -> None:
        url = self.client._url("/hello/{0}/world", "somename")
        assert url == f"{url_prefix}hello/somename/world"

        url = self.client._url("/hello/{0}/world/{1}", "somename", "someothername")
        assert url == f"{url_prefix}hello/somename/world/someothername"

        url = self.client._url("/hello/{0}/world", "some?name")
        assert url == f"{url_prefix}hello/some%3Fname/world"

        url = self.client._url("/images/{0}/push", "localhost:5000/image")
        assert url == f"{url_prefix}images/localhost:5000/image/push"

    def test_url_invalid_resource(self) -> None:
        with pytest.raises(ValueError):
            self.client._url("/hello/{0}/world", ["sakuya", "izayoi"])  # type: ignore

    def test_url_no_resource(self) -> None:
        url = self.client._url("/simple")
        assert url == f"{url_prefix}simple"

    def test_url_unversioned_api(self) -> None:
        url = self.client._url("/hello/{0}/world", "somename", versioned_api=False)
        assert url == f"{url_base}hello/somename/world"

    def test_version(self) -> None:
        self.client.version()

        fake_request.assert_called_with(
            "GET", url_prefix + "version", timeout=DEFAULT_TIMEOUT_SECONDS
        )

    def test_version_no_api_version(self) -> None:
        self.client.version(False)

        fake_request.assert_called_with(
            "GET", url_base + "version", timeout=DEFAULT_TIMEOUT_SECONDS
        )

    def test_retrieve_server_version(self) -> None:
        client = APIClient(version="auto")
        assert isinstance(client._version, str)
        assert client._version != "auto"
        client.close()

    def test_auto_retrieve_server_version(self) -> None:
        version = self.client._retrieve_server_version()
        assert isinstance(version, str)

    def test_info(self) -> None:
        self.client.info()

        fake_request.assert_called_with(
            "GET", url_prefix + "info", timeout=DEFAULT_TIMEOUT_SECONDS
        )

    def test_search(self) -> None:
        self.client.get_json("/images/search", params={"term": "busybox"})

        fake_request.assert_called_with(
            "GET",
            url_prefix + "images/search",
            params={"term": "busybox"},
            timeout=DEFAULT_TIMEOUT_SECONDS,
        )

    def test_login(self) -> None:
        self.client.login("sakuya", "izayoi")
        args = fake_request.call_args
        assert args[0][0] == "POST"
        assert args[0][1] == url_prefix + "auth"
        assert json.loads(args[1]["data"]) == {
            "username": "sakuya",
            "password": "izayoi",
        }
        assert args[1]["headers"] == {"Content-Type": "application/json"}
        assert self.client._auth_configs.auths["docker.io"] == {
            "email": None,
            "password": "izayoi",
            "username": "sakuya",
            "serveraddress": None,
        }

    def _socket_path_for_client_session(self, client: APIClient) -> str:
        socket_adapter = client.get_adapter("http+docker://")
        return socket_adapter.socket_path  # type: ignore[attr-defined]

    def test_url_compatibility_unix(self) -> None:
        c = APIClient(base_url="unix://socket", version=DEFAULT_DOCKER_API_VERSION)

        assert self._socket_path_for_client_session(c) == "/socket"

    def test_url_compatibility_unix_triple_slash(self) -> None:
        c = APIClient(base_url="unix:///socket", version=DEFAULT_DOCKER_API_VERSION)

        assert self._socket_path_for_client_session(c) == "/socket"

    def test_url_compatibility_http_unix_triple_slash(self) -> None:
        c = APIClient(
            base_url="http+unix:///socket", version=DEFAULT_DOCKER_API_VERSION
        )

        assert self._socket_path_for_client_session(c) == "/socket"

    def test_url_compatibility_http(self) -> None:
        c = APIClient(
            base_url="http://hostname:1234", version=DEFAULT_DOCKER_API_VERSION
        )

        assert c.base_url == "http://hostname:1234"

    def test_url_compatibility_tcp(self) -> None:
        c = APIClient(
            base_url="tcp://hostname:1234", version=DEFAULT_DOCKER_API_VERSION
        )

        assert c.base_url == "http://hostname:1234"

    def test_remove_link(self) -> None:
        self.client.delete_call(
            "/containers/{0}",
            "3cc2351ab11b",
            params={"v": False, "link": True, "force": False},
        )

        fake_request.assert_called_with(
            "DELETE",
            url_prefix + "containers/3cc2351ab11b",
            params={"v": False, "link": True, "force": False},
            timeout=DEFAULT_TIMEOUT_SECONDS,
        )

    def test_stream_helper_decoding(self) -> None:
        status_code, content = fake_api.fake_responses[url_prefix + "events"]()
        content_str = json.dumps(content).encode("utf-8")
        body = io.BytesIO(content_str)

        # mock a stream interface
        raw_resp = urllib3.HTTPResponse(body=body)
        raw_resp._fp.chunked = True
        raw_resp._fp.chunk_left = len(body.getvalue()) - 1

        # pass `decode=False` to the helper
        raw_resp._fp.seek(0)
        resp = create_response(status_code=status_code, content=content, raw=raw_resp)
        result = next(self.client._stream_helper(resp))
        assert result == content_str

        # pass `decode=True` to the helper
        raw_resp._fp.seek(0)
        resp = create_response(status_code=status_code, content=content, raw=raw_resp)
        result = next(self.client._stream_helper(resp, decode=True))
        assert result == content

        # non-chunked response, pass `decode=False` to the helper
        raw_resp._fp.chunked = False
        raw_resp._fp.seek(0)
        resp = create_response(status_code=status_code, content=content, raw=raw_resp)
        result = next(self.client._stream_helper(resp))
        assert result == content_str.decode("utf-8")  # type: ignore

        # non-chunked response, pass `decode=True` to the helper
        raw_resp._fp.seek(0)
        resp = create_response(status_code=status_code, content=content, raw=raw_resp)
        result = next(self.client._stream_helper(resp, decode=True))
        assert result == content


class UnixSocketStreamTest(unittest.TestCase):
    def setUp(self) -> None:
        socket_dir = tempfile.mkdtemp()
        self.build_context = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, socket_dir)
        self.addCleanup(shutil.rmtree, self.build_context)
        self.socket_file = os.path.join(socket_dir, "test_sock.sock")
        self.server_socket = self._setup_socket()
        self.stop_server = False
        server_thread = threading.Thread(target=self.run_server)
        server_thread.daemon = True
        server_thread.start()
        self.response: t.Any = None
        self.request_handler: t.Any = None
        self.addCleanup(server_thread.join)
        self.addCleanup(self.stop)

    def stop(self) -> None:
        self.stop_server = True

    def _setup_socket(self) -> socket.socket:
        server_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        server_sock.bind(self.socket_file)
        # Non-blocking mode so that we can shut the test down easily
        server_sock.setblocking(0)  # type: ignore
        server_sock.listen(5)
        return server_sock

    def run_server(self) -> None:
        try:
            while not self.stop_server:
                try:
                    connection, dummy_client_address = self.server_socket.accept()
                except socket.error:
                    # Probably no connection to accept yet
                    time.sleep(0.01)
                    continue

                connection.setblocking(1)  # type: ignore
                try:
                    self.request_handler(connection)
                finally:
                    connection.close()
        finally:
            self.server_socket.close()

    def early_response_sending_handler(self, connection: socket.socket) -> None:
        data = b""
        headers = None

        connection.sendall(self.response)
        while not headers:
            data += connection.recv(2048)
            parts = data.split(b"\r\n\r\n", 1)
            if len(parts) == 2:
                headers, data = parts

        mo = re.search(r"Content-Length: ([0-9]+)", headers.decode())
        assert mo
        content_length = int(mo.group(1))

        while True:
            if len(data) >= content_length:
                break

            data += connection.recv(2048)

    @pytest.mark.skipif(constants.IS_WINDOWS_PLATFORM, reason="Unix only")
    def test_early_stream_response(self) -> None:
        self.request_handler = self.early_response_sending_handler
        lines = []
        for i in range(0, 50):
            line = str(i).encode()
            lines += [f"{len(line):x}".encode(), line]
        lines.append(b"0")
        lines.append(b"")

        self.response = (
            b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n"
        ) + b"\r\n".join(lines)

        with APIClient(
            base_url="http+unix://" + self.socket_file,
            version=DEFAULT_DOCKER_API_VERSION,
        ) as client:
            for i in range(5):
                try:
                    params = {
                        "t": None,
                        "remote": None,
                        "q": False,
                        "nocache": False,
                        "rm": False,
                        "forcerm": False,
                        "pull": False,
                        "dockerfile": "Dockerfile",
                    }
                    headers = {"Content-Type": "application/tar"}
                    data = b"..."
                    response = client._post(
                        client._url("/build"),
                        params=params,
                        headers=headers,
                        data=data,
                        stream=True,
                    )
                    stream = client._stream_helper(response, decode=False)
                    break
                except requests.ConnectionError as e:
                    if i == 4:
                        raise e

            assert list(stream) == [str(i).encode() for i in range(50)]


@pytest.mark.skip(
    "This test requires starting a networking server and tries to access it. "
    "This does not work with network separation with Docker-based unit tests, "
    "but it does work with podman-based unit tests."
)
class TCPSocketStreamTest(unittest.TestCase):
    stdout_data = b"""
    Now, those children out there, they're jumping through the
    flames in the hope that the god of the fire will make them fruitful.
    Really, you can't blame them. After all, what girl would not prefer the
    child of a god to that of some acne-scarred artisan?
    """
    stderr_data = b"""
    And what of the true God? To whose glory churches and monasteries have been
    built on these islands for generations past? Now shall what of Him?
    """

    server: ThreadingTCPServer
    thread: threading.Thread
    address: str

    @classmethod
    def setup_class(cls) -> None:
        cls.server = ThreadingTCPServer(("", 0), cls.get_handler_class())
        cls.thread = threading.Thread(target=cls.server.serve_forever)
        cls.thread.daemon = True
        cls.thread.start()
        cls.address = f"http://{socket.gethostname()}:{cls.server.server_address[1]}"

    @classmethod
    def teardown_class(cls) -> None:
        cls.server.shutdown()
        cls.server.server_close()
        cls.thread.join()

    @classmethod
    def get_handler_class(cls) -> type[BaseHTTPRequestHandler]:
        stdout_data = cls.stdout_data
        stderr_data = cls.stderr_data

        class Handler(BaseHTTPRequestHandler):
            def do_POST(self) -> None:  # pylint: disable=invalid-name
                resp_data = self.get_resp_data()
                self.send_response(101)
                self.send_header("Content-Type", "application/vnd.docker.raw-stream")
                self.send_header("Connection", "Upgrade")
                self.send_header("Upgrade", "tcp")
                self.end_headers()
                self.wfile.flush()
                time.sleep(0.2)
                self.wfile.write(resp_data)
                self.wfile.flush()

            def get_resp_data(self) -> bytes:
                path = self.path.split("/")[-1]
                if path == "tty":
                    return stdout_data + stderr_data
                if path == "no-tty":
                    data = b""
                    data += self.frame_header(1, stdout_data)
                    data += stdout_data
                    data += self.frame_header(2, stderr_data)
                    data += stderr_data
                    return data
                raise NotImplementedError(f"Unknown path {path}")

            @staticmethod
            def frame_header(stream: int, data: bytes) -> bytes:
                return struct.pack(">BxxxL", stream, len(data))

        return Handler

    def request(
        self,
        stream: bool | None = None,
        tty: bool | None = None,
        demux: bool | None = None,
    ) -> t.Any:
        assert stream is not None and tty is not None and demux is not None
        with APIClient(
            base_url=self.address,
            version=DEFAULT_DOCKER_API_VERSION,
        ) as client:
            if tty:
                url = client._url("/tty")
            else:
                url = client._url("/no-tty")
            resp = client._post(url, stream=True)
            return client._read_from_socket(resp, stream=stream, tty=tty, demux=demux)

    def test_read_from_socket_tty(self) -> None:
        res = self.request(stream=True, tty=True, demux=False)
        assert next(res) == self.stdout_data + self.stderr_data
        with self.assertRaises(StopIteration):
            next(res)

    def test_read_from_socket_tty_demux(self) -> None:
        res = self.request(stream=True, tty=True, demux=True)
        assert next(res) == (self.stdout_data + self.stderr_data, None)
        with self.assertRaises(StopIteration):
            next(res)

    def test_read_from_socket_no_tty(self) -> None:
        res = self.request(stream=True, tty=False, demux=False)
        assert next(res) == self.stdout_data
        assert next(res) == self.stderr_data
        with self.assertRaises(StopIteration):
            next(res)

    def test_read_from_socket_no_tty_demux(self) -> None:
        res = self.request(stream=True, tty=False, demux=True)
        assert (self.stdout_data, None) == next(res)
        assert (None, self.stderr_data) == next(res)
        with self.assertRaises(StopIteration):
            next(res)

    def test_read_from_socket_no_stream_tty(self) -> None:
        res = self.request(stream=False, tty=True, demux=False)
        assert res == self.stdout_data + self.stderr_data

    def test_read_from_socket_no_stream_tty_demux(self) -> None:
        res = self.request(stream=False, tty=True, demux=True)
        assert res == (self.stdout_data + self.stderr_data, None)

    def test_read_from_socket_no_stream_no_tty(self) -> None:
        res = self.request(stream=False, tty=False, demux=False)
        assert res == self.stdout_data + self.stderr_data

    def test_read_from_socket_no_stream_no_tty_demux(self) -> None:
        res = self.request(stream=False, tty=False, demux=True)
        assert res == (self.stdout_data, self.stderr_data)


class UserAgentTest(unittest.TestCase):
    def setUp(self) -> None:
        self.patcher = mock.patch.object(
            APIClient,
            "send",
            return_value=fake_resp("GET", f"{fake_api.prefix}/version"),
        )
        self.mock_send = self.patcher.start()

    def tearDown(self) -> None:
        self.patcher.stop()

    def test_default_user_agent(self) -> None:
        client = APIClient(version=DEFAULT_DOCKER_API_VERSION)
        client.version()

        assert self.mock_send.call_count == 1
        headers = self.mock_send.call_args[0][0].headers
        expected = "ansible-community.docker"
        assert headers["User-Agent"] == expected

    def test_custom_user_agent(self) -> None:
        client = APIClient(user_agent="foo/bar", version=DEFAULT_DOCKER_API_VERSION)
        client.version()

        assert self.mock_send.call_count == 1
        headers = self.mock_send.call_args[0][0].headers
        assert headers["User-Agent"] == "foo/bar"


class DisableSocketTest(unittest.TestCase):
    class DummySocket:
        def __init__(self, timeout: int | float | None = 60) -> None:
            self.timeout = timeout
            self._sock: t.Any = None

        def settimeout(self, timeout: int | float | None) -> None:
            self.timeout = timeout

        def gettimeout(self) -> int | float | None:
            return self.timeout

    def setUp(self) -> None:
        self.client = APIClient(version=DEFAULT_DOCKER_API_VERSION)

    def test_disable_socket_timeout(self) -> None:
        """Test that the timeout is disabled on a generic socket object."""
        the_socket = self.DummySocket()

        self.client._disable_socket_timeout(the_socket)  # type: ignore

        assert the_socket.timeout is None

    def test_disable_socket_timeout2(self) -> None:
        """Test that the timeouts are disabled on a generic socket object
        and its _sock object if present."""
        the_socket = self.DummySocket()
        the_socket._sock = self.DummySocket()  # type: ignore

        self.client._disable_socket_timeout(the_socket)  # type: ignore

        assert the_socket.timeout is None
        assert the_socket._sock.timeout is None

    def test_disable_socket_timout_non_blocking(self) -> None:
        """Test that a non-blocking socket does not get set to blocking."""
        the_socket = self.DummySocket()
        the_socket._sock = self.DummySocket(0.0)  # type: ignore

        self.client._disable_socket_timeout(the_socket)  # type: ignore

        assert the_socket.timeout is None
        assert the_socket._sock.timeout == 0.0
@@ -0,0 +1,11 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

DEFAULT_DOCKER_API_VERSION = "1.45"
@@ -0,0 +1,622 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import typing as t

from ansible_collections.community.docker.plugins.module_utils._api import constants
from ansible_collections.community.docker.tests.unit.plugins.module_utils._api.constants import (
    DEFAULT_DOCKER_API_VERSION,
)

from . import fake_stat

if t.TYPE_CHECKING:
    from collections.abc import Callable


CURRENT_VERSION = f"v{DEFAULT_DOCKER_API_VERSION}"

FAKE_CONTAINER_ID = "3cc2351ab11b"
FAKE_IMAGE_ID = "e9aa60c60128"
FAKE_EXEC_ID = "d5d177f121dc"
FAKE_NETWORK_ID = "33fb6a3462b8"
FAKE_IMAGE_NAME = "test_image"
FAKE_TARBALL_PATH = "/path/to/tarball"
FAKE_REPO_NAME = "repo"
FAKE_TAG_NAME = "tag"
FAKE_FILE_NAME = "file"
FAKE_URL = "myurl"
FAKE_PATH = "/path"
FAKE_VOLUME_NAME = "perfectcherryblossom"
FAKE_NODE_ID = "24ifsmvkjbyhk"
FAKE_SECRET_ID = "epdyrw4tsi03xy3deu8g8ly6o"
FAKE_SECRET_NAME = "super_secret"

# Each method is prefixed with HTTP method (get, post...)
# for clarity and readability


def get_fake_version() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {
        "ApiVersion": "1.35",
        "Arch": "amd64",
        "BuildTime": "2018-01-10T20:09:37.000000000+00:00",
        "Components": [
            {
                "Details": {
                    "ApiVersion": "1.35",
                    "Arch": "amd64",
                    "BuildTime": "2018-01-10T20:09:37.000000000+00:00",
                    "Experimental": "false",
                    "GitCommit": "03596f5",
                    "GoVersion": "go1.9.2",
                    "KernelVersion": "4.4.0-112-generic",
                    "MinAPIVersion": "1.12",
                    "Os": "linux",
                },
                "Name": "Engine",
                "Version": "18.01.0-ce",
            }
        ],
        "GitCommit": "03596f5",
        "GoVersion": "go1.9.2",
        "KernelVersion": "4.4.0-112-generic",
        "MinAPIVersion": "1.12",
        "Os": "linux",
        "Platform": {"Name": ""},
        "Version": "18.01.0-ce",
    }

    return status_code, response


def get_fake_info() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {
        "Containers": 1,
        "Images": 1,
        "Debug": False,
        "MemoryLimit": False,
        "SwapLimit": False,
        "IPv4Forwarding": True,
    }
    return status_code, response


def post_fake_auth() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Status": "Login Succeeded", "IdentityToken": "9cbaf023786cd7"}
    return status_code, response


def get_fake_ping() -> tuple[int, str]:
    return 200, "OK"


def get_fake_search() -> tuple[int, list[dict[str, t.Any]]]:
    status_code = 200
    response = [{"Name": "busybox", "Description": "Fake Description"}]
    return status_code, response


def get_fake_images() -> tuple[int, list[dict[str, t.Any]]]:
    status_code = 200
    response = [
        {
            "Id": FAKE_IMAGE_ID,
            "Created": "2 days ago",
            "Repository": "busybox",
            "RepoTags": ["busybox:latest", "busybox:1.0"],
        }
    ]
    return status_code, response


def get_fake_image_history() -> tuple[int, list[dict[str, t.Any]]]:
    status_code = 200
    response = [
        {"Id": "b750fe79269d", "Created": 1364102658, "CreatedBy": "/bin/bash"},
        {"Id": "27cf78414709", "Created": 1364068391, "CreatedBy": ""},
    ]

    return status_code, response


def post_fake_import_image() -> tuple[int, str]:
    status_code = 200
    response = "Import messages..."

    return status_code, response


def get_fake_containers() -> tuple[int, list[dict[str, t.Any]]]:
    status_code = 200
    response = [
        {
            "Id": FAKE_CONTAINER_ID,
            "Image": "busybox:latest",
            "Created": "2 days ago",
            "Command": "true",
            "Status": "fake status",
        }
    ]
    return status_code, response


def post_fake_start_container() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def post_fake_resize_container() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def post_fake_create_container() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def get_fake_inspect_container(tty: bool = False) -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {
        "Id": FAKE_CONTAINER_ID,
        "Config": {"Labels": {"foo": "bar"}, "Privileged": True, "Tty": tty},
        "ID": FAKE_CONTAINER_ID,
        "Image": "busybox:latest",
        "Name": "foobar",
        "State": {
            "Status": "running",
            "Running": True,
            "Pid": 0,
            "ExitCode": 0,
            "StartedAt": "2013-09-25T14:01:18.869545111+02:00",
            "Ghost": False,
        },
        "HostConfig": {
            "LogConfig": {"Type": "json-file", "Config": {}},
        },
        "MacAddress": "02:42:ac:11:00:0a",
    }
    return status_code, response


def get_fake_inspect_image() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {
        "Id": FAKE_IMAGE_ID,
        "Parent": "27cf784147099545",
        "Created": "2013-03-23T22:24:18.818426-07:00",
        "Container": FAKE_CONTAINER_ID,
        "Config": {"Labels": {"bar": "foo"}},
        "ContainerConfig": {
            "Hostname": "",
            "User": "",
            "Memory": 0,
            "MemorySwap": 0,
            "AttachStdin": False,
            "AttachStdout": False,
            "AttachStderr": False,
            "PortSpecs": "",
            "Tty": True,
            "OpenStdin": True,
            "StdinOnce": False,
            "Env": "",
            "Cmd": ["/bin/bash"],
            "Dns": "",
            "Image": "base",
            "Volumes": "",
            "VolumesFrom": "",
            "WorkingDir": "",
        },
        "Size": 6823592,
    }
    return status_code, response


def get_fake_insert_image() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"StatusCode": 0}
    return status_code, response


def get_fake_wait() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"StatusCode": 0}
    return status_code, response


def get_fake_logs() -> tuple[int, bytes]:
    status_code = 200
    response = (
        b"\x01\x00\x00\x00\x00\x00\x00\x00"
        b"\x02\x00\x00\x00\x00\x00\x00\x00"
        b"\x01\x00\x00\x00\x00\x00\x00\x11Flowering Nights\n"
        b"\x01\x00\x00\x00\x00\x00\x00\x10(Sakuya Iyazoi)\n"
    )
    return status_code, response


def get_fake_diff() -> tuple[int, list[dict[str, t.Any]]]:
    status_code = 200
    response = [{"Path": "/test", "Kind": 1}]
    return status_code, response


def get_fake_events() -> tuple[int, list[dict[str, t.Any]]]:
    status_code = 200
    response = [
        {
            "status": "stop",
            "id": FAKE_CONTAINER_ID,
            "from": FAKE_IMAGE_ID,
            "time": 1423247867,
        }
    ]
    return status_code, response


def get_fake_export() -> tuple[int, str]:
    status_code = 200
    response = "Byte Stream...."
    return status_code, response


def post_fake_exec_create() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_EXEC_ID}
    return status_code, response


def post_fake_exec_start() -> tuple[int, bytes]:
    status_code = 200
    response = (
        b"\x01\x00\x00\x00\x00\x00\x00\x11bin\nboot\ndev\netc\n"
        b"\x01\x00\x00\x00\x00\x00\x00\x12lib\nmnt\nproc\nroot\n"
        b"\x01\x00\x00\x00\x00\x00\x00\x0csbin\nusr\nvar\n"
    )
    return status_code, response


def post_fake_exec_resize() -> tuple[int, str]:
    status_code = 201
    return status_code, ""


def get_fake_exec_inspect() -> tuple[int, dict[str, t.Any]]:
    return 200, {
        "OpenStderr": True,
        "OpenStdout": True,
        "Container": get_fake_inspect_container()[1],
        "Running": False,
        "ProcessConfig": {
            "arguments": ["hello world"],
            "tty": False,
            "entrypoint": "echo",
            "privileged": False,
            "user": "",
        },
        "ExitCode": 0,
        "ID": FAKE_EXEC_ID,
        "OpenStdin": False,
    }


def post_fake_stop_container() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def post_fake_kill_container() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def post_fake_pause_container() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def post_fake_unpause_container() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def post_fake_restart_container() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def post_fake_rename_container() -> tuple[int, None]:
    status_code = 204
    return status_code, None


def delete_fake_remove_container() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def post_fake_image_create() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_IMAGE_ID}
    return status_code, response


def delete_fake_remove_image() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_IMAGE_ID}
    return status_code, response


def get_fake_get_image() -> tuple[int, str]:
    status_code = 200
    response = "Byte Stream...."
    return status_code, response


def post_fake_load_image() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_IMAGE_ID}
    return status_code, response


def post_fake_commit() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def post_fake_push() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_IMAGE_ID}
    return status_code, response


def post_fake_build_container() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_CONTAINER_ID}
    return status_code, response


def post_fake_tag_image() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"Id": FAKE_IMAGE_ID}
    return status_code, response


def get_fake_stats() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = fake_stat.OBJ
    return status_code, response


def get_fake_top() -> tuple[int, dict[str, t.Any]]:
    return 200, {
        "Processes": [
            [
                "root",
                "26501",
                "6907",
                "0",
                "10:32",
                "pts/55",
                "00:00:00",
                "sleep 60",
            ],
        ],
        "Titles": [
            "UID",
            "PID",
            "PPID",
            "C",
            "STIME",
            "TTY",
            "TIME",
            "CMD",
        ],
    }


def get_fake_volume_list() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {
        "Volumes": [
            {
                "Name": "perfectcherryblossom",
                "Driver": "local",
                "Mountpoint": "/var/lib/docker/volumes/perfectcherryblossom",
                "Scope": "local",
            },
            {
                "Name": "subterraneananimism",
                "Driver": "local",
                "Mountpoint": "/var/lib/docker/volumes/subterraneananimism",
                "Scope": "local",
            },
        ]
    }
    return status_code, response


def get_fake_volume() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {
        "Name": "perfectcherryblossom",
        "Driver": "local",
        "Mountpoint": "/var/lib/docker/volumes/perfectcherryblossom",
        "Labels": {"com.example.some-label": "some-value"},
        "Scope": "local",
    }
    return status_code, response


def fake_remove_volume() -> tuple[int, None]:
    return 204, None


def post_fake_update_container() -> tuple[int, dict[str, t.Any]]:
    return 200, {"Warnings": []}


def post_fake_update_node() -> tuple[int, None]:
    return 200, None


def post_fake_join_swarm() -> tuple[int, None]:
    return 200, None


def get_fake_network_list() -> tuple[int, list[dict[str, t.Any]]]:
    return 200, [
        {
            "Name": "bridge",
            "Id": FAKE_NETWORK_ID,
            "Scope": "local",
            "Driver": "bridge",
            "EnableIPv6": False,
            "Internal": False,
            "IPAM": {"Driver": "default", "Config": [{"Subnet": "172.17.0.0/16"}]},
            "Containers": {
                FAKE_CONTAINER_ID: {
                    "EndpointID": "ed2419a97c1d99",
                    "MacAddress": "02:42:ac:11:00:02",
                    "IPv4Address": "172.17.0.2/16",
                    "IPv6Address": "",
                }
            },
            "Options": {
                "com.docker.network.bridge.default_bridge": "true",
                "com.docker.network.bridge.enable_icc": "true",
                "com.docker.network.bridge.enable_ip_masquerade": "true",
                "com.docker.network.bridge.host_binding_ipv4": "0.0.0.0",
                "com.docker.network.bridge.name": "docker0",
                "com.docker.network.driver.mtu": "1500",
            },
        }
    ]


def get_fake_network() -> tuple[int, dict[str, t.Any]]:
    return 200, get_fake_network_list()[1][0]


def post_fake_network() -> tuple[int, dict[str, t.Any]]:
    return 201, {"Id": FAKE_NETWORK_ID, "Warnings": []}


def delete_fake_network() -> tuple[int, None]:
    return 204, None


def post_fake_network_connect() -> tuple[int, None]:
    return 200, None


def post_fake_network_disconnect() -> tuple[int, None]:
    return 200, None


def post_fake_secret() -> tuple[int, dict[str, t.Any]]:
    status_code = 200
    response = {"ID": FAKE_SECRET_ID}
    return status_code, response


# Maps real api url to fake response callback
prefix = "http+docker://localhost"  # pylint: disable=invalid-name
if constants.IS_WINDOWS_PLATFORM:
    prefix = "http+docker://localnpipe"  # pylint: disable=invalid-name

fake_responses: dict[str | tuple[str, str], Callable] = {
    f"{prefix}/version": get_fake_version,
    f"{prefix}/{CURRENT_VERSION}/version": get_fake_version,
    f"{prefix}/{CURRENT_VERSION}/info": get_fake_info,
    f"{prefix}/{CURRENT_VERSION}/auth": post_fake_auth,
    f"{prefix}/{CURRENT_VERSION}/_ping": get_fake_ping,
    f"{prefix}/{CURRENT_VERSION}/images/search": get_fake_search,
    f"{prefix}/{CURRENT_VERSION}/images/json": get_fake_images,
    f"{prefix}/{CURRENT_VERSION}/images/test_image/history": get_fake_image_history,
    f"{prefix}/{CURRENT_VERSION}/images/create": post_fake_import_image,
    f"{prefix}/{CURRENT_VERSION}/containers/json": get_fake_containers,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start": post_fake_start_container,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize": post_fake_resize_container,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json": get_fake_inspect_container,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename": post_fake_rename_container,
    f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag": post_fake_tag_image,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait": get_fake_wait,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs": get_fake_logs,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes": get_fake_diff,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export": get_fake_export,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update": post_fake_update_container,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec": post_fake_exec_create,
    f"{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start": post_fake_exec_start,
    f"{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json": get_fake_exec_inspect,
    f"{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize": post_fake_exec_resize,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats": get_fake_stats,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top": get_fake_top,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop": post_fake_stop_container,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill": post_fake_kill_container,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause": post_fake_pause_container,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause": post_fake_unpause_container,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart": post_fake_restart_container,
    f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b": delete_fake_remove_container,
    # TODO: the following is a duplicate of the import endpoint further above!
    f"{prefix}/{CURRENT_VERSION}/images/create": post_fake_image_create,  # noqa: F601
    f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128": delete_fake_remove_image,
    f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get": get_fake_get_image,
    f"{prefix}/{CURRENT_VERSION}/images/load": post_fake_load_image,
    f"{prefix}/{CURRENT_VERSION}/images/test_image/json": get_fake_inspect_image,
    f"{prefix}/{CURRENT_VERSION}/images/test_image/insert": get_fake_insert_image,
    f"{prefix}/{CURRENT_VERSION}/images/test_image/push": post_fake_push,
    f"{prefix}/{CURRENT_VERSION}/commit": post_fake_commit,
    f"{prefix}/{CURRENT_VERSION}/containers/create": post_fake_create_container,
    f"{prefix}/{CURRENT_VERSION}/build": post_fake_build_container,
    f"{prefix}/{CURRENT_VERSION}/events": get_fake_events,
    (f"{prefix}/{CURRENT_VERSION}/volumes", "GET"): get_fake_volume_list,
    (f"{prefix}/{CURRENT_VERSION}/volumes/create", "POST"): get_fake_volume,
    (f"{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}", "GET"): get_fake_volume,
    (
        f"{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}",
        "DELETE",
    ): fake_remove_volume,
    (
        f"{prefix}/{CURRENT_VERSION}/nodes/{FAKE_NODE_ID}/update?version=1",
        "POST",
    ): post_fake_update_node,
    (f"{prefix}/{CURRENT_VERSION}/swarm/join", "POST"): post_fake_join_swarm,
    (f"{prefix}/{CURRENT_VERSION}/networks", "GET"): get_fake_network_list,
    (f"{prefix}/{CURRENT_VERSION}/networks/create", "POST"): post_fake_network,
    (f"{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}", "GET"): get_fake_network,
    (
        f"{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}",
        "DELETE",
    ): delete_fake_network,
    (
        f"{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/connect",
        "POST",
    ): post_fake_network_connect,
    (
        f"{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/disconnect",
        "POST",
    ): post_fake_network_disconnect,
    f"{prefix}/{CURRENT_VERSION}/secrets/create": post_fake_secret,
}
@@ -0,0 +1,92 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

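# Canned payload in the shape returned by the Docker container stats API
# (read timestamp, network, cpu_stats, memory_stats, blkio_stats), used as a test fixture.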
OBJ = {
    "read": "2015-02-11T19:20:46.667237763+02:00",
    "network": {
        "rx_bytes": 567224,
        "rx_packets": 3773,
        "rx_errors": 0,
        "rx_dropped": 0,
        "tx_bytes": 1176,
        "tx_packets": 13,
        "tx_errors": 0,
        "tx_dropped": 0,
    },
    "cpu_stats": {
        "cpu_usage": {
            "total_usage": 157260874053,
            "percpu_usage": [52196306950, 24118413549, 53292684398, 27653469156],
            "usage_in_kernelmode": 37140000000,
            "usage_in_usermode": 62140000000,
        },
        "system_cpu_usage": 3.0881377e14,
        "throttling_data": {"periods": 0, "throttled_periods": 0, "throttled_time": 0},
    },
    "memory_stats": {
        "usage": 179314688,
        "max_usage": 258166784,
        "stats": {
            "active_anon": 90804224,
            "active_file": 2195456,
            "cache": 3096576,
            "hierarchical_memory_limit": 1.844674407371e19,
            "inactive_anon": 85516288,
            "inactive_file": 798720,
            "mapped_file": 2646016,
            "pgfault": 101034,
            "pgmajfault": 1207,
            "pgpgin": 115814,
            "pgpgout": 75613,
            "rss": 176218112,
            "rss_huge": 12582912,
            "total_active_anon": 90804224,
            "total_active_file": 2195456,
            "total_cache": 3096576,
            "total_inactive_anon": 85516288,
            "total_inactive_file": 798720,
            "total_mapped_file": 2646016,
            "total_pgfault": 101034,
            "total_pgmajfault": 1207,
            "total_pgpgin": 115814,
            "total_pgpgout": 75613,
            "total_rss": 176218112,
            "total_rss_huge": 12582912,
            "total_unevictable": 0,
            "total_writeback": 0,
            "unevictable": 0,
            "writeback": 0,
        },
        "failcnt": 0,
        "limit": 8039038976,
    },
    "blkio_stats": {
        "io_service_bytes_recursive": [
            {"major": 8, "minor": 0, "op": "Read", "value": 72843264},
            {"major": 8, "minor": 0, "op": "Write", "value": 4096},
            {"major": 8, "minor": 0, "op": "Sync", "value": 4096},
            {"major": 8, "minor": 0, "op": "Async", "value": 72843264},
            {"major": 8, "minor": 0, "op": "Total", "value": 72847360},
        ],
        "io_serviced_recursive": [
            {"major": 8, "minor": 0, "op": "Read", "value": 10581},
            {"major": 8, "minor": 0, "op": "Write", "value": 1},
            {"major": 8, "minor": 0, "op": "Sync", "value": 1},
            {"major": 8, "minor": 0, "op": "Async", "value": 10581},
            {"major": 8, "minor": 0, "op": "Total", "value": 10582},
        ],
        "io_queue_recursive": [],
        "io_service_time_recursive": [],
        "io_wait_time_recursive": [],
        "io_merged_recursive": [],
        "io_time_recursive": [],
        "sectors_recursive": [],
    },
}
@@ -0,0 +1,800 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import base64
import json
import os
import os.path
import random
import shutil
import tempfile
import typing as t
import unittest
from unittest import mock

import pytest

from ansible_collections.community.docker.plugins.module_utils._api import auth, errors
from ansible_collections.community.docker.plugins.module_utils._api.credentials.errors import (
    CredentialsNotFound,
)
from ansible_collections.community.docker.plugins.module_utils._api.credentials.store import (
    Store,
)


class RegressionTest(unittest.TestCase):
    def test_803_urlsafe_encode(self) -> None:
        auth_data = {"username": "root", "password": "GR?XGR?XGR?XGR?X"}
        encoded = auth.encode_header(auth_data)
        assert b"/" not in encoded
        assert b"_" in encoded


class ResolveRepositoryNameTest(unittest.TestCase):
    def test_resolve_repository_name_hub_library_image(self) -> None:
        assert auth.resolve_repository_name("image") == ("docker.io", "image")

    def test_resolve_repository_name_dotted_hub_library_image(self) -> None:
        assert auth.resolve_repository_name("image.valid") == (
            "docker.io",
            "image.valid",
        )

    def test_resolve_repository_name_hub_image(self) -> None:
        assert auth.resolve_repository_name("username/image") == (
            "docker.io",
            "username/image",
        )

    def test_explicit_hub_index_library_image(self) -> None:
        assert auth.resolve_repository_name("docker.io/image") == ("docker.io", "image")

    def test_explicit_legacy_hub_index_library_image(self) -> None:
        assert auth.resolve_repository_name("index.docker.io/image") == (
            "docker.io",
            "image",
        )

    def test_resolve_repository_name_private_registry(self) -> None:
        assert auth.resolve_repository_name("my.registry.net/image") == (
            "my.registry.net",
            "image",
        )

    def test_resolve_repository_name_private_registry_with_port(self) -> None:
        assert auth.resolve_repository_name("my.registry.net:5000/image") == (
            "my.registry.net:5000",
            "image",
        )

    def test_resolve_repository_name_private_registry_with_username(self) -> None:
        assert auth.resolve_repository_name("my.registry.net/username/image") == (
            "my.registry.net",
            "username/image",
        )

    def test_resolve_repository_name_no_dots_but_port(self) -> None:
        assert auth.resolve_repository_name("hostname:5000/image") == (
            "hostname:5000",
            "image",
        )

    def test_resolve_repository_name_no_dots_but_port_and_username(self) -> None:
        assert auth.resolve_repository_name("hostname:5000/username/image") == (
            "hostname:5000",
            "username/image",
        )

    def test_resolve_repository_name_localhost(self) -> None:
        assert auth.resolve_repository_name("localhost/image") == ("localhost", "image")

    def test_resolve_repository_name_localhost_with_username(self) -> None:
        assert auth.resolve_repository_name("localhost/username/image") == (
            "localhost",
            "username/image",
        )

    def test_invalid_index_name(self) -> None:
        with pytest.raises(errors.InvalidRepository):
            auth.resolve_repository_name("-gecko.com/image")

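# Helper mirroring how "auth" entries in Docker's config.json are built:
# base64("username:password").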
def encode_auth(auth_info: dict[str, t.Any]) -> bytes:
    return base64.b64encode(
        auth_info.get("username", "").encode("utf-8")
        + b":"
        + auth_info.get("password", "").encode("utf-8")
    )


class ResolveAuthTest(unittest.TestCase):
    index_config = {"auth": encode_auth({"username": "indexuser"})}
    private_config = {"auth": encode_auth({"username": "privateuser"})}
    legacy_config = {"auth": encode_auth({"username": "legacyauth"})}

    auth_config = auth.AuthConfig(
        {
            "auths": auth.parse_auth(
                {
                    "https://index.docker.io/v1/": index_config,
                    "my.registry.net": private_config,
                    "http://legacy.registry.url/v1/": legacy_config,
                }
            )
        }
    )

    def test_resolve_authconfig_hostname_only(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config, "my.registry.net")
        assert ac is not None
        assert ac["username"] == "privateuser"

    def test_resolve_authconfig_no_protocol(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config, "my.registry.net/v1/")
        assert ac is not None
        assert ac["username"] == "privateuser"

    def test_resolve_authconfig_no_path(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config, "http://my.registry.net")
        assert ac is not None
        assert ac["username"] == "privateuser"

    def test_resolve_authconfig_no_path_trailing_slash(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config, "http://my.registry.net/")
        assert ac is not None
        assert ac["username"] == "privateuser"

    def test_resolve_authconfig_no_path_wrong_secure_proto(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config, "https://my.registry.net")
        assert ac is not None
        assert ac["username"] == "privateuser"

    def test_resolve_authconfig_no_path_wrong_insecure_proto(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config, "http://index.docker.io")
        assert ac is not None
        assert ac["username"] == "indexuser"

    def test_resolve_authconfig_path_wrong_proto(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config, "https://my.registry.net/v1/")
        assert ac is not None
        assert ac["username"] == "privateuser"

    def test_resolve_authconfig_default_registry(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config)
        assert ac is not None
        assert ac["username"] == "indexuser"

    def test_resolve_authconfig_default_explicit_none(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config, None)
        assert ac is not None
        assert ac["username"] == "indexuser"

    def test_resolve_authconfig_fully_explicit(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config, "http://my.registry.net/v1/")
        assert ac is not None
        assert ac["username"] == "privateuser"

    def test_resolve_authconfig_legacy_config(self) -> None:
        ac = auth.resolve_authconfig(self.auth_config, "legacy.registry.url")
        assert ac is not None
        assert ac["username"] == "legacyauth"

    def test_resolve_authconfig_no_match(self) -> None:
        assert auth.resolve_authconfig(self.auth_config, "does.not.exist") is None

    def test_resolve_registry_and_auth_library_image(self) -> None:
        image = "image"
        ac = auth.resolve_authconfig(
            self.auth_config, auth.resolve_repository_name(image)[0]
        )
        assert ac is not None
        assert ac["username"] == "indexuser"

    def test_resolve_registry_and_auth_hub_image(self) -> None:
        image = "username/image"
        ac = auth.resolve_authconfig(
            self.auth_config, auth.resolve_repository_name(image)[0]
        )
        assert ac is not None
        assert ac["username"] == "indexuser"

    def test_resolve_registry_and_auth_explicit_hub(self) -> None:
        image = "docker.io/username/image"
        ac = auth.resolve_authconfig(
            self.auth_config, auth.resolve_repository_name(image)[0]
        )
        assert ac is not None
        assert ac["username"] == "indexuser"

    def test_resolve_registry_and_auth_explicit_legacy_hub(self) -> None:
        image = "index.docker.io/username/image"
        ac = auth.resolve_authconfig(
            self.auth_config, auth.resolve_repository_name(image)[0]
        )
        assert ac is not None
        assert ac["username"] == "indexuser"

    def test_resolve_registry_and_auth_private_registry(self) -> None:
        image = "my.registry.net/image"
        ac = auth.resolve_authconfig(
            self.auth_config, auth.resolve_repository_name(image)[0]
        )
        assert ac is not None
        assert ac["username"] == "privateuser"

    def test_resolve_registry_and_auth_unauthenticated_registry(self) -> None:
        image = "other.registry.net/image"
        assert (
            auth.resolve_authconfig(
                self.auth_config, auth.resolve_repository_name(image)[0]
            )
            is None
        )

    def test_resolve_auth_with_empty_credstore_and_auth_dict(self) -> None:
        auth_config = auth.AuthConfig(
            {
                "auths": auth.parse_auth(
                    {
                        "https://index.docker.io/v1/": self.index_config,
                    }
                ),
                "credsStore": "blackbox",
            }
        )
        with mock.patch(
            "ansible_collections.community.docker.plugins.module_utils._api.auth.AuthConfig._resolve_authconfig_credstore"
        ) as m:
            m.return_value = None
            ac = auth.resolve_authconfig(auth_config, None)
            assert ac is not None
            assert ac["username"] == "indexuser"


class LoadConfigTest(unittest.TestCase):
    def test_load_config_no_file(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)
        cfg = auth.load_config(folder)
        assert cfg is not None

    def test_load_legacy_config(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)
        cfg_path = os.path.join(folder, ".dockercfg")
        auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
        with open(cfg_path, "wt", encoding="utf-8") as f:
            f.write(f"auth = {auth_}\n")
            f.write("email = sakuya@scarlet.net")

        cfg = auth.load_config(cfg_path)
        assert auth.resolve_authconfig(cfg) is not None
        assert cfg.auths[auth.INDEX_NAME] is not None
        cfg2 = cfg.auths[auth.INDEX_NAME]
        assert cfg2["username"] == "sakuya"
        assert cfg2["password"] == "izayoi"
        assert cfg2["email"] == "sakuya@scarlet.net"
        assert cfg2.get("Auth") is None

    def test_load_json_config(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)
        cfg_path = os.path.join(folder, ".dockercfg")
        auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
        email = "sakuya@scarlet.net"
        with open(cfg_path, "wt", encoding="utf-8") as f:
            json.dump({auth.INDEX_URL: {"auth": auth_, "email": email}}, f)
        cfg = auth.load_config(cfg_path)
        assert auth.resolve_authconfig(cfg) is not None
        assert cfg.auths[auth.INDEX_URL] is not None
        cfg2 = cfg.auths[auth.INDEX_URL]
        assert cfg2["username"] == "sakuya"
        assert cfg2["password"] == "izayoi"
        assert cfg2["email"] == email
        assert cfg2.get("Auth") is None

    def test_load_modern_json_config(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)
        cfg_path = os.path.join(folder, "config.json")
        auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
        email = "sakuya@scarlet.net"
        with open(cfg_path, "wt", encoding="utf-8") as f:
            json.dump({"auths": {auth.INDEX_URL: {"auth": auth_, "email": email}}}, f)
        cfg = auth.load_config(cfg_path)
        assert auth.resolve_authconfig(cfg) is not None
        assert cfg.auths[auth.INDEX_URL] is not None
        cfg2 = cfg.auths[auth.INDEX_URL]
        assert cfg2["username"] == "sakuya"
        assert cfg2["password"] == "izayoi"
        assert cfg2["email"] == email

    def test_load_config_with_random_name(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)

        dockercfg_path = os.path.join(folder, f".{random.randrange(100000)}.dockercfg")
        registry = "https://your.private.registry.io"
        auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
        config = {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}}

        with open(dockercfg_path, "wt", encoding="utf-8") as f:
            json.dump(config, f)

        cfg = auth.load_config(dockercfg_path).auths
        assert registry in cfg
        assert cfg[registry] is not None
        cfg2 = cfg[registry]
        assert cfg2["username"] == "sakuya"
        assert cfg2["password"] == "izayoi"
        assert cfg2["email"] == "sakuya@scarlet.net"
        assert cfg2.get("auth") is None

    def test_load_config_custom_config_env(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)

        dockercfg_path = os.path.join(folder, "config.json")
        registry = "https://your.private.registry.io"
        auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
        config = {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}}

        with open(dockercfg_path, "wt", encoding="utf-8") as f:
            json.dump(config, f)

        with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}):
            cfg = auth.load_config(None).auths
            assert registry in cfg
            assert cfg[registry] is not None
            cfg2 = cfg[registry]
            assert cfg2["username"] == "sakuya"
            assert cfg2["password"] == "izayoi"
            assert cfg2["email"] == "sakuya@scarlet.net"
            assert cfg2.get("auth") is None

    def test_load_config_custom_config_env_with_auths(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)

        dockercfg_path = os.path.join(folder, "config.json")
        registry = "https://your.private.registry.io"
        auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
        config = {
            "auths": {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}}
        }

        with open(dockercfg_path, "wt", encoding="utf-8") as f:
            json.dump(config, f)

        with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}):
            cfg = auth.load_config(None)
            assert registry in cfg.auths
            cfg2 = cfg.auths[registry]
            assert cfg2["username"] == "sakuya"
            assert cfg2["password"] == "izayoi"
            assert cfg2["email"] == "sakuya@scarlet.net"
            assert cfg2.get("auth") is None

    def test_load_config_custom_config_env_utf8(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)

        dockercfg_path = os.path.join(folder, "config.json")
        registry = "https://your.private.registry.io"
        auth_ = base64.b64encode(b"sakuya\xc3\xa6:izayoi\xc3\xa6").decode("ascii")
        config = {
            "auths": {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}}
        }

        with open(dockercfg_path, "wt", encoding="utf-8") as f:
            json.dump(config, f)

        with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}):
            cfg = auth.load_config(None)
            assert registry in cfg.auths
            cfg2 = cfg.auths[registry]
            assert cfg2["username"] == b"sakuya\xc3\xa6".decode("utf8")
            assert cfg2["password"] == b"izayoi\xc3\xa6".decode("utf8")
            assert cfg2["email"] == "sakuya@scarlet.net"
            assert cfg2.get("auth") is None

    def test_load_config_unknown_keys(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)
        dockercfg_path = os.path.join(folder, "config.json")
        config = {"detachKeys": "ctrl-q, ctrl-u, ctrl-i"}
        with open(dockercfg_path, "wt", encoding="utf-8") as f:
            json.dump(config, f)

        cfg = auth.load_config(dockercfg_path)
        assert dict(cfg) == {"auths": {}}

    def test_load_config_invalid_auth_dict(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)
        dockercfg_path = os.path.join(folder, "config.json")
        config = {"auths": {"scarlet.net": {"sakuya": "izayoi"}}}
        with open(dockercfg_path, "wt", encoding="utf-8") as f:
            json.dump(config, f)

        cfg = auth.load_config(dockercfg_path)
        assert dict(cfg) == {"auths": {"scarlet.net": {}}}

    def test_load_config_identity_token(self) -> None:
        folder = tempfile.mkdtemp()
        registry = "scarlet.net"
        token = "1ce1cebb-503e-7043-11aa-7feb8bd4a1ce"
        self.addCleanup(shutil.rmtree, folder)
        dockercfg_path = os.path.join(folder, "config.json")
        auth_entry = encode_auth({"username": "sakuya"}).decode("ascii")
        config = {"auths": {registry: {"auth": auth_entry, "identitytoken": token}}}
        with open(dockercfg_path, "wt", encoding="utf-8") as f:
            json.dump(config, f)

        cfg = auth.load_config(dockercfg_path)
        assert registry in cfg.auths
        cfg2 = cfg.auths[registry]
        assert "IdentityToken" in cfg2
        assert cfg2["IdentityToken"] == token

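# The credential-store tests below drive AuthConfig against InMemoryStore (defined at
# the end of this file), which stands in for external docker-credential-* helpers.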
class CredstoreTest(unittest.TestCase):
    def setUp(self) -> None:
        self.authconfig = auth.AuthConfig({"credsStore": "default"})
        self.default_store = InMemoryStore("default")
        self.authconfig._stores["default"] = self.default_store
        self.default_store.store(
            "https://gensokyo.jp/v2",
            "sakuya",
            "izayoi",
        )
        self.default_store.store(
            "https://default.com/v2",
            "user",
            "hunter2",
        )

    def test_get_credential_store(self) -> None:
        auth_config = auth.AuthConfig(
            {
                "credHelpers": {
                    "registry1.io": "truesecret",
                    "registry2.io": "powerlock",
                },
                "credsStore": "blackbox",
            }
        )

        assert auth_config.get_credential_store("registry1.io") == "truesecret"
        assert auth_config.get_credential_store("registry2.io") == "powerlock"
        assert auth_config.get_credential_store("registry3.io") == "blackbox"

    def test_get_credential_store_no_default(self) -> None:
        auth_config = auth.AuthConfig(
            {
                "credHelpers": {
                    "registry1.io": "truesecret",
                    "registry2.io": "powerlock",
                },
            }
        )
        assert auth_config.get_credential_store("registry2.io") == "powerlock"
        assert auth_config.get_credential_store("registry3.io") is None

    def test_get_credential_store_default_index(self) -> None:
        auth_config = auth.AuthConfig(
            {
                "credHelpers": {"https://index.docker.io/v1/": "powerlock"},
                "credsStore": "truesecret",
            }
        )

        assert auth_config.get_credential_store(None) == "powerlock"
        assert auth_config.get_credential_store("docker.io") == "powerlock"
        assert auth_config.get_credential_store("images.io") == "truesecret"

    def test_get_credential_store_with_plain_dict(self) -> None:
        auth_config = {
            "credHelpers": {"registry1.io": "truesecret", "registry2.io": "powerlock"},
            "credsStore": "blackbox",
        }

        assert auth.get_credential_store(auth_config, "registry1.io") == "truesecret"
        assert auth.get_credential_store(auth_config, "registry2.io") == "powerlock"
        assert auth.get_credential_store(auth_config, "registry3.io") == "blackbox"

    def test_get_all_credentials_credstore_only(self) -> None:
        assert self.authconfig.get_all_credentials() == {
            "https://gensokyo.jp/v2": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "gensokyo.jp": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "https://default.com/v2": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
            "default.com": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
        }

    def test_get_all_credentials_with_empty_credhelper(self) -> None:
        self.authconfig["credHelpers"] = {
            "registry1.io": "truesecret",
        }
        self.authconfig._stores["truesecret"] = InMemoryStore()
        assert self.authconfig.get_all_credentials() == {
            "https://gensokyo.jp/v2": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "gensokyo.jp": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "https://default.com/v2": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
            "default.com": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
            "registry1.io": None,
        }

    def test_get_all_credentials_with_credhelpers_only(self) -> None:
        del self.authconfig["credsStore"]
        assert self.authconfig.get_all_credentials() == {}

        self.authconfig["credHelpers"] = {
            "https://gensokyo.jp/v2": "default",
            "https://default.com/v2": "default",
        }

        assert self.authconfig.get_all_credentials() == {
            "https://gensokyo.jp/v2": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "gensokyo.jp": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "https://default.com/v2": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
            "default.com": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
        }

    def test_get_all_credentials_with_auths_entries(self) -> None:
        self.authconfig.add_auth(
            "registry1.io",
            {
                "ServerAddress": "registry1.io",
                "Username": "reimu",
                "Password": "hakurei",
            },
        )

        assert self.authconfig.get_all_credentials() == {
            "https://gensokyo.jp/v2": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "gensokyo.jp": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "https://default.com/v2": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
            "default.com": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
            "registry1.io": {
                "ServerAddress": "registry1.io",
                "Username": "reimu",
                "Password": "hakurei",
            },
        }

    def test_get_all_credentials_with_empty_auths_entry(self) -> None:
        self.authconfig.add_auth("default.com", {})

        assert self.authconfig.get_all_credentials() == {
            "https://gensokyo.jp/v2": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "gensokyo.jp": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "https://default.com/v2": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
            "default.com": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
        }

    def test_get_all_credentials_credstore_overrides_auth_entry(self) -> None:
        self.authconfig.add_auth(
            "default.com",
            {
                "Username": "shouldnotsee",
                "Password": "thisentry",
                "ServerAddress": "https://default.com/v2",
            },
        )

        assert self.authconfig.get_all_credentials() == {
            "https://gensokyo.jp/v2": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "gensokyo.jp": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "https://default.com/v2": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
            "default.com": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
        }

    def test_get_all_credentials_helpers_override_default(self) -> None:
        self.authconfig["credHelpers"] = {
            "https://default.com/v2": "truesecret",
        }
        truesecret = InMemoryStore("truesecret")
        truesecret.store("https://default.com/v2", "reimu", "hakurei")
        self.authconfig._stores["truesecret"] = truesecret
        assert self.authconfig.get_all_credentials() == {
            "https://gensokyo.jp/v2": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "gensokyo.jp": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "https://default.com/v2": {
                "Username": "reimu",
                "Password": "hakurei",
                "ServerAddress": "https://default.com/v2",
            },
            "default.com": {
                "Username": "reimu",
                "Password": "hakurei",
                "ServerAddress": "https://default.com/v2",
            },
        }

    def test_get_all_credentials_3_sources(self) -> None:
        self.authconfig["credHelpers"] = {
            "registry1.io": "truesecret",
        }
        truesecret = InMemoryStore("truesecret")
        truesecret.store("registry1.io", "reimu", "hakurei")
        self.authconfig._stores["truesecret"] = truesecret
        self.authconfig.add_auth(
            "registry2.io",
            {
                "ServerAddress": "registry2.io",
                "Username": "reimu",
                "Password": "hakurei",
            },
        )

        assert self.authconfig.get_all_credentials() == {
            "https://gensokyo.jp/v2": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "gensokyo.jp": {
                "Username": "sakuya",
                "Password": "izayoi",
                "ServerAddress": "https://gensokyo.jp/v2",
            },
            "https://default.com/v2": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
            "default.com": {
                "Username": "user",
                "Password": "hunter2",
                "ServerAddress": "https://default.com/v2",
            },
            "registry1.io": {
                "ServerAddress": "registry1.io",
                "Username": "reimu",
                "Password": "hakurei",
            },
            "registry2.io": {
                "ServerAddress": "registry2.io",
                "Username": "reimu",
                "Password": "hakurei",
            },
        }

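# Minimal in-memory credential store used as a test double for the external
# docker-credential-* helper programs wrapped by the real Store class.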
class InMemoryStore(Store):
    def __init__(  # pylint: disable=super-init-not-called
        self, *args: t.Any, **kwargs: t.Any
    ) -> None:
        self.__store: dict[str | bytes, dict[str, t.Any]] = {}

    def get(self, server: str | bytes) -> dict[str, t.Any]:
        try:
            return self.__store[server]
        except KeyError:
            raise CredentialsNotFound() from None

    def store(self, server: str, username: str, secret: str) -> bytes:
        self.__store[server] = {
            "ServerURL": server,
            "Username": username,
            "Secret": secret,
        }
        return b""

    def list(self) -> dict[str | bytes, str]:
        return dict((k, v["Username"]) for k, v in self.__store.items())

    def erase(self, server: str | bytes) -> None:
        del self.__store[server]
@@ -0,0 +1,68 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2025 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import unittest

import pytest

from ansible_collections.community.docker.plugins.module_utils._api import errors
from ansible_collections.community.docker.plugins.module_utils._api.constants import (
    DEFAULT_NPIPE,
    DEFAULT_UNIX_SOCKET,
    IS_WINDOWS_PLATFORM,
)
from ansible_collections.community.docker.plugins.module_utils._api.context.api import (
    ContextAPI,
)
from ansible_collections.community.docker.plugins.module_utils._api.context.context import (
    Context,
)


class BaseContextTest(unittest.TestCase):
    @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="Linux specific path check")
    def test_url_compatibility_on_linux(self) -> None:
        c = Context("test")
        assert c.Host == DEFAULT_UNIX_SOCKET[5:]

    @pytest.mark.skipif(not IS_WINDOWS_PLATFORM, reason="Windows specific path check")
    def test_url_compatibility_on_windows(self) -> None:
        c = Context("test")
        assert c.Host == DEFAULT_NPIPE

    def test_fail_on_default_context_create(self) -> None:
        with pytest.raises(errors.ContextException):
            ContextAPI.create_context("default")

    def test_default_in_context_list(self) -> None:
        found = False
        ctx = ContextAPI.contexts()
        for c in ctx:
            if c.Name == "default":
                found = True
        assert found is True

    def test_get_current_context(self) -> None:
        context = ContextAPI.get_current_context()
        assert context is not None
        assert context.Name == "default"

    def test_https_host(self) -> None:
        c = Context("test", host="tcp://testdomain:8080", tls=True)
        assert c.Host == "https://testdomain:8080"

    def test_context_inspect_without_params(self) -> None:
        ctx = ContextAPI.inspect_context()
        assert ctx["Name"] == "default"
        assert ctx["Metadata"]["StackOrchestrator"] == "swarm"
        assert ctx["Endpoints"]["docker"]["Host"] in (
            DEFAULT_NPIPE,
            DEFAULT_UNIX_SOCKET[5:],
        )
@@ -0,0 +1,135 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import unittest

import requests

from ansible_collections.community.docker.plugins.module_utils._api.errors import (
    APIError,
    DockerException,
    create_api_error_from_http_exception,
    create_unexpected_kwargs_error,
)


class APIErrorTest(unittest.TestCase):
    def test_api_error_is_caught_by_dockerexception(self) -> None:
        try:
            raise APIError("this should be caught by DockerException")
        except DockerException:
            pass

    def test_status_code_200(self) -> None:
        """The status_code property is present with 200 response."""
        resp = requests.Response()
        resp.status_code = 200
        err = APIError("", response=resp)
        assert err.status_code == 200

    def test_status_code_400(self) -> None:
        """The status_code property is present with 400 response."""
        resp = requests.Response()
        resp.status_code = 400
        err = APIError("", response=resp)
        assert err.status_code == 400

    def test_status_code_500(self) -> None:
        """The status_code property is present with 500 response."""
        resp = requests.Response()
        resp.status_code = 500
        err = APIError("", response=resp)
        assert err.status_code == 500

    def test_is_server_error_200(self) -> None:
        """Report not server error on 200 response."""
        resp = requests.Response()
        resp.status_code = 200
        err = APIError("", response=resp)
        assert err.is_server_error() is False

    def test_is_server_error_300(self) -> None:
        """Report not server error on 300 response."""
        resp = requests.Response()
        resp.status_code = 300
        err = APIError("", response=resp)
        assert err.is_server_error() is False

    def test_is_server_error_400(self) -> None:
        """Report not server error on 400 response."""
        resp = requests.Response()
        resp.status_code = 400
        err = APIError("", response=resp)
        assert err.is_server_error() is False

    def test_is_server_error_500(self) -> None:
        """Report server error on 500 response."""
        resp = requests.Response()
        resp.status_code = 500
        err = APIError("", response=resp)
        assert err.is_server_error() is True

    def test_is_client_error_500(self) -> None:
        """Report not client error on 500 response."""
        resp = requests.Response()
        resp.status_code = 500
        err = APIError("", response=resp)
        assert err.is_client_error() is False

    def test_is_client_error_400(self) -> None:
        """Report client error on 400 response."""
        resp = requests.Response()
        resp.status_code = 400
        err = APIError("", response=resp)
        assert err.is_client_error() is True

    def test_is_error_300(self) -> None:
        """Report no error on 300 response."""
        resp = requests.Response()
        resp.status_code = 300
        err = APIError("", response=resp)
        assert err.is_error() is False

    def test_is_error_400(self) -> None:
        """Report error on 400 response."""
        resp = requests.Response()
        resp.status_code = 400
        err = APIError("", response=resp)
        assert err.is_error() is True

    def test_is_error_500(self) -> None:
        """Report error on 500 response."""
        resp = requests.Response()
        resp.status_code = 500
        err = APIError("", response=resp)
        assert err.is_error() is True

    def test_create_error_from_exception(self) -> None:
        resp = requests.Response()
        resp.status_code = 500
        err = APIError("")
        try:
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            try:
                create_api_error_from_http_exception(e)
            except APIError as e2:
                err = e2
        assert err.is_server_error() is True


class CreateUnexpectedKwargsErrorTest(unittest.TestCase):
    def test_create_unexpected_kwargs_error_single(self) -> None:
        e = create_unexpected_kwargs_error("f", {"foo": "bar"})
        assert str(e) == "f() got an unexpected keyword argument 'foo'"

    def test_create_unexpected_kwargs_error_multiple(self) -> None:
        e = create_unexpected_kwargs_error("f", {"foo": "bar", "baz": "bosh"})
        assert str(e) == "f() got unexpected keyword arguments 'baz', 'foo'"
@@ -0,0 +1,51 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import unittest

from ansible_collections.community.docker.plugins.module_utils._api.transport.sshconn import (
    SSHHTTPAdapter,
    SSHSocket,
)


class SSHAdapterTest(unittest.TestCase):
    @staticmethod
    def test_ssh_hostname_prefix_trim() -> None:
        conn = SSHHTTPAdapter(base_url="ssh://user@hostname:1234", shell_out=True)
        assert conn.ssh_host == "user@hostname:1234"

    @staticmethod
    def test_ssh_parse_url() -> None:
        c = SSHSocket(host="user@hostname:1234")
        assert c.host == "hostname"
        assert c.port == "1234"
        assert c.user == "user"

    @staticmethod
    def test_ssh_parse_hostname_only() -> None:
        c = SSHSocket(host="hostname")
        assert c.host == "hostname"
        assert c.port is None
        assert c.user is None

    @staticmethod
    def test_ssh_parse_user_and_hostname() -> None:
        c = SSHSocket(host="user@hostname")
        assert c.host == "hostname"
        assert c.port is None
        assert c.user == "user"

    @staticmethod
    def test_ssh_parse_hostname_and_port() -> None:
        c = SSHSocket(host="hostname:22")
        assert c.host == "hostname"
        assert c.port == "22"
        assert c.user is None
@@ -0,0 +1,85 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import unittest
from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1

import pytest

from ansible_collections.community.docker.plugins.module_utils._api.transport import (
    ssladapter,
)

try:
    from ssl import CertificateError, match_hostname  # type: ignore
except ImportError:
    HAS_MATCH_HOSTNAME = False  # pylint: disable=invalid-name
else:
    HAS_MATCH_HOSTNAME = True  # pylint: disable=invalid-name


class SSLAdapterTest(unittest.TestCase):
    def test_only_uses_tls(self) -> None:
        ssl_context = ssladapter.urllib3.util.ssl_.create_urllib3_context()

        assert ssl_context.options & OP_NO_SSLv3
        # if OpenSSL is compiled without SSL2 support, OP_NO_SSLv2 will be 0
        assert not bool(OP_NO_SSLv2) or ssl_context.options & OP_NO_SSLv2
        assert not ssl_context.options & OP_NO_TLSv1


@pytest.mark.skipif(not HAS_MATCH_HOSTNAME, reason="match_hostname is not available")
class MatchHostnameTest(unittest.TestCase):
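    # Certificate fixture in the dict form produced by ssl.SSLSocket.getpeercert(),
    # with CN, wildcard DNS, and IP address subjectAltName entries.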
    cert = {
        "issuer": (
            (("countryName", "US"),),
            (("stateOrProvinceName", "California"),),
            (("localityName", "San Francisco"),),
            (("organizationName", "Docker Inc"),),
            (("organizationalUnitName", "Docker-Python"),),
            (("commonName", "localhost"),),
            (("emailAddress", "info@docker.com"),),
        ),
        "notAfter": "Mar 25 23:08:23 2030 GMT",
        "notBefore": "Mar 25 23:08:23 2016 GMT",
        "serialNumber": "BD5F894C839C548F",
        "subject": (
            (("countryName", "US"),),
            (("stateOrProvinceName", "California"),),
            (("localityName", "San Francisco"),),
            (("organizationName", "Docker Inc"),),
            (("organizationalUnitName", "Docker-Python"),),
            (("commonName", "localhost"),),
            (("emailAddress", "info@docker.com"),),
        ),
        "subjectAltName": (
            ("DNS", "localhost"),
            ("DNS", "*.gensokyo.jp"),
            ("IP Address", "127.0.0.1"),
        ),
        "version": 3,
    }

    def test_match_ip_address_success(self) -> None:
        assert match_hostname(self.cert, "127.0.0.1") is None

    def test_match_localhost_success(self) -> None:
        assert match_hostname(self.cert, "localhost") is None

    def test_match_dns_success(self) -> None:
        assert match_hostname(self.cert, "touhou.gensokyo.jp") is None

    def test_match_ip_address_failure(self) -> None:
        with pytest.raises(CertificateError):
            match_hostname(self.cert, "192.168.0.25")

    def test_match_dns_failure(self) -> None:
        with pytest.raises(CertificateError):
            match_hostname(self.cert, "foobar.co.uk")
@@ -0,0 +1,520 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import os
import os.path
import shutil
import socket
import tarfile
import tempfile
import typing as t
import unittest

import pytest

from ansible_collections.community.docker.plugins.module_utils._api.constants import (
    IS_WINDOWS_PLATFORM,
)
from ansible_collections.community.docker.plugins.module_utils._api.utils.build import (
    exclude_paths,
    tar,
)

if t.TYPE_CHECKING:
    from collections.abc import Collection

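# Create a throwaway directory tree with the given subdirectories and files so
# that exclusion patterns can be evaluated against real paths on disk.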
def make_tree(dirs: list[str], files: list[str]) -> str:
|
||||
base = tempfile.mkdtemp()
|
||||
|
||||
for path in dirs:
|
||||
os.makedirs(os.path.join(base, path))
|
||||
|
||||
for path in files:
|
||||
with open(os.path.join(base, path), "wt", encoding="utf-8") as f:
|
||||
f.write("content")
|
||||
|
||||
return base
|
||||
|
||||
|
||||
def convert_paths(collection: Collection[str]) -> set[str]:
|
||||
return set(map(convert_path, collection))
|
||||
|
||||
|
||||
def convert_path(path: str) -> str:
|
||||
return path.replace("/", os.path.sep)
|
||||
|
||||
|
||||
class ExcludePathsTest(unittest.TestCase):
|
||||
dirs = [
|
||||
"foo",
|
||||
"foo/bar",
|
||||
"bar",
|
||||
"target",
|
||||
"target/subdir",
|
||||
"subdir",
|
||||
"subdir/target",
|
||||
"subdir/target/subdir",
|
||||
"subdir/subdir2",
|
||||
"subdir/subdir2/target",
|
||||
"subdir/subdir2/target/subdir",
|
||||
]
|
||||
|
||||
files = [
|
||||
"Dockerfile",
|
||||
"Dockerfile.alt",
|
||||
".dockerignore",
|
||||
"a.py",
|
||||
"a.go",
|
||||
"b.py",
|
||||
"cde.py",
|
||||
"foo/a.py",
|
||||
"foo/b.py",
|
||||
"foo/bar/a.py",
|
||||
"bar/a.py",
|
||||
"foo/Dockerfile3",
|
||||
"target/file.txt",
|
||||
"target/subdir/file.txt",
|
||||
"subdir/file.txt",
|
||||
"subdir/target/file.txt",
|
||||
"subdir/target/subdir/file.txt",
|
||||
"subdir/subdir2/file.txt",
|
||||
"subdir/subdir2/target/file.txt",
|
||||
"subdir/subdir2/target/subdir/file.txt",
|
||||
]
|
||||
|
||||
all_paths = set(dirs + files)
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.base = make_tree(self.dirs, self.files)
|
||||
|
||||
def tearDown(self) -> None:
|
||||
shutil.rmtree(self.base)
|
||||
|
||||
def exclude(self, patterns: list[str], dockerfile: str | None = None) -> set[str]:
|
||||
return set(exclude_paths(self.base, patterns, dockerfile=dockerfile))
|
||||
|
||||
def test_no_excludes(self) -> None:
|
||||
assert self.exclude([""]) == convert_paths(self.all_paths)
|
||||
|
||||
def test_no_dupes(self) -> None:
|
||||
paths = exclude_paths(self.base, ["!a.py"])
|
||||
assert sorted(paths) == sorted(set(paths))
|
||||
|
||||
def test_wildcard_exclude(self) -> None:
|
||||
assert self.exclude(["*"]) == set(["Dockerfile", ".dockerignore"])
|
||||
|
||||
def test_exclude_dockerfile_dockerignore(self) -> None:
|
||||
"""
|
||||
Even if the .dockerignore file explicitly says to exclude
|
||||
Dockerfile and/or .dockerignore, don't exclude them from
|
||||
the actual tar file.
|
||||
"""
|
||||
assert self.exclude(["Dockerfile", ".dockerignore"]) == convert_paths(
|
||||
self.all_paths
|
||||
)
|
||||
|
||||
def test_exclude_custom_dockerfile(self) -> None:
|
||||
"""
|
||||
If we're using a custom Dockerfile, make sure that's not
|
||||
excluded.
|
||||
"""
|
||||
assert self.exclude(["*"], dockerfile="Dockerfile.alt") == set(
|
||||
["Dockerfile.alt", ".dockerignore"]
|
||||
)
|
||||
|
||||
assert self.exclude(["*"], dockerfile="foo/Dockerfile3") == convert_paths(
|
||||
set(["foo/Dockerfile3", ".dockerignore"])
|
||||
)
|
||||
|
||||
# https://github.com/docker/docker-py/issues/1956
|
||||
assert self.exclude(["*"], dockerfile="./foo/Dockerfile3") == convert_paths(
|
||||
set(["foo/Dockerfile3", ".dockerignore"])
|
||||
)
|
||||
|
||||
def test_exclude_dockerfile_child(self) -> None:
|
||||
includes = self.exclude(["foo/"], dockerfile="foo/Dockerfile3")
|
||||
assert convert_path("foo/Dockerfile3") in includes
|
||||
assert convert_path("foo/a.py") not in includes
|
||||
|
||||
def test_single_filename(self) -> None:
|
||||
assert self.exclude(["a.py"]) == convert_paths(self.all_paths - set(["a.py"]))
|
||||
|
||||
def test_single_filename_leading_dot_slash(self) -> None:
|
||||
assert self.exclude(["./a.py"]) == convert_paths(self.all_paths - set(["a.py"]))
|
||||
|
||||
# As odd as it sounds, a filename pattern with a trailing slash on the
|
||||
# end *will* result in that file being excluded.
|
||||
def test_single_filename_trailing_slash(self) -> None:
|
||||
assert self.exclude(["a.py/"]) == convert_paths(self.all_paths - set(["a.py"]))
|
||||
|
||||
def test_wildcard_filename_start(self) -> None:
|
||||
assert self.exclude(["*.py"]) == convert_paths(
|
||||
self.all_paths - set(["a.py", "b.py", "cde.py"])
|
||||
)
|
||||
|
||||
def test_wildcard_with_exception(self) -> None:
|
||||
assert self.exclude(["*.py", "!b.py"]) == convert_paths(
|
||||
self.all_paths - set(["a.py", "cde.py"])
|
||||
)
|
||||
|
||||
def test_wildcard_with_wildcard_exception(self) -> None:
|
||||
assert self.exclude(["*.*", "!*.go"]) == convert_paths(
|
||||
self.all_paths
|
||||
- set(
|
||||
[
|
||||
"a.py",
|
||||
"b.py",
|
||||
"cde.py",
|
||||
"Dockerfile.alt",
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
def test_wildcard_filename_end(self) -> None:
|
||||
assert self.exclude(["a.*"]) == convert_paths(
|
||||
self.all_paths - set(["a.py", "a.go"])
|
||||
)
|
||||
|
||||
def test_question_mark(self) -> None:
|
||||
assert self.exclude(["?.py"]) == convert_paths(
|
||||
self.all_paths - set(["a.py", "b.py"])
|
||||
)
|
||||
|
||||
def test_single_subdir_single_filename(self) -> None:
|
||||
assert self.exclude(["foo/a.py"]) == convert_paths(
|
||||
self.all_paths - set(["foo/a.py"])
|
||||
)
|
||||
|
||||
def test_single_subdir_single_filename_leading_slash(self) -> None:
|
||||
assert self.exclude(["/foo/a.py"]) == convert_paths(
|
||||
self.all_paths - set(["foo/a.py"])
|
||||
)
|
||||
|
||||
def test_exclude_include_absolute_path(self) -> None:
|
||||
base = make_tree([], ["a.py", "b.py"])
|
||||
assert exclude_paths(base, ["/*", "!/*.py"]) == set(["a.py", "b.py"])
|
||||
|
||||
def test_single_subdir_with_path_traversal(self) -> None:
|
||||
assert self.exclude(["foo/whoops/../a.py"]) == convert_paths(
|
||||
self.all_paths - set(["foo/a.py"])
|
||||
)
|
||||
|
||||
def test_single_subdir_wildcard_filename(self) -> None:
|
||||
assert self.exclude(["foo/*.py"]) == convert_paths(
|
||||
self.all_paths - set(["foo/a.py", "foo/b.py"])
|
||||
)
|
||||
|
||||
def test_wildcard_subdir_single_filename(self) -> None:
|
||||
assert self.exclude(["*/a.py"]) == convert_paths(
|
||||
self.all_paths - set(["foo/a.py", "bar/a.py"])
|
||||
)
|
||||
|
||||
def test_wildcard_subdir_wildcard_filename(self) -> None:
|
||||
assert self.exclude(["*/*.py"]) == convert_paths(
|
||||
self.all_paths - set(["foo/a.py", "foo/b.py", "bar/a.py"])
|
||||
)
|
||||
|
||||
def test_directory(self) -> None:
|
||||
assert self.exclude(["foo"]) == convert_paths(
|
||||
self.all_paths
|
||||
- set(
|
||||
[
|
||||
"foo",
|
||||
"foo/a.py",
|
||||
"foo/b.py",
|
||||
"foo/bar",
|
||||
"foo/bar/a.py",
|
||||
"foo/Dockerfile3",
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
def test_directory_with_trailing_slash(self) -> None:
|
||||
assert self.exclude(["foo"]) == convert_paths(
|
||||
self.all_paths
|
||||
- set(
|
||||
[
|
||||
"foo",
|
||||
"foo/a.py",
|
||||
"foo/b.py",
|
||||
"foo/bar",
|
||||
"foo/bar/a.py",
|
||||
"foo/Dockerfile3",
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
    def test_directory_with_single_exception(self) -> None:
        assert self.exclude(["foo", "!foo/bar/a.py"]) == convert_paths(
            self.all_paths
            - set(["foo/a.py", "foo/b.py", "foo", "foo/bar", "foo/Dockerfile3"])
        )

    def test_directory_with_subdir_exception(self) -> None:
        assert self.exclude(["foo", "!foo/bar"]) == convert_paths(
            self.all_paths - set(["foo/a.py", "foo/b.py", "foo", "foo/Dockerfile3"])
        )

    @pytest.mark.skipif(
        not IS_WINDOWS_PLATFORM, reason="Backslash patterns only on Windows"
    )
    def test_directory_with_subdir_exception_win32_pathsep(self) -> None:
        assert self.exclude(["foo", "!foo\\bar"]) == convert_paths(
            self.all_paths - set(["foo/a.py", "foo/b.py", "foo", "foo/Dockerfile3"])
        )

    def test_directory_with_wildcard_exception(self) -> None:
        assert self.exclude(["foo", "!foo/*.py"]) == convert_paths(
            self.all_paths - set(["foo/bar", "foo/bar/a.py", "foo", "foo/Dockerfile3"])
        )

    def test_subdirectory(self) -> None:
        assert self.exclude(["foo/bar"]) == convert_paths(
            self.all_paths - set(["foo/bar", "foo/bar/a.py"])
        )

    @pytest.mark.skipif(
        not IS_WINDOWS_PLATFORM, reason="Backslash patterns only on Windows"
    )
    def test_subdirectory_win32_pathsep(self) -> None:
        assert self.exclude(["foo\\bar"]) == convert_paths(
            self.all_paths - set(["foo/bar", "foo/bar/a.py"])
        )

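    # In .dockerignore patterns, "**" matches any number of path segments
    # (including none), so "**/a.py" excludes a.py at every directory depth.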
    def test_double_wildcard(self) -> None:
        assert self.exclude(["**/a.py"]) == convert_paths(
            self.all_paths - set(["a.py", "foo/a.py", "foo/bar/a.py", "bar/a.py"])
        )

        assert self.exclude(["foo/**/bar"]) == convert_paths(
            self.all_paths - set(["foo/bar", "foo/bar/a.py"])
        )

    def test_single_and_double_wildcard(self) -> None:
        assert self.exclude(["**/target/*/*"]) == convert_paths(
            self.all_paths
            - set(
                [
                    "target/subdir/file.txt",
                    "subdir/target/subdir/file.txt",
                    "subdir/subdir2/target/subdir/file.txt",
                ]
            )
        )

    def test_trailing_double_wildcard(self) -> None:
        assert self.exclude(["subdir/**"]) == convert_paths(
            self.all_paths
            - set(
                [
                    "subdir/file.txt",
                    "subdir/target/file.txt",
                    "subdir/target/subdir/file.txt",
                    "subdir/subdir2/file.txt",
                    "subdir/subdir2/target/file.txt",
                    "subdir/subdir2/target/subdir/file.txt",
                    "subdir/target",
                    "subdir/target/subdir",
                    "subdir/subdir2",
                    "subdir/subdir2/target",
                    "subdir/subdir2/target/subdir",
                ]
            )
        )

    def test_double_wildcard_with_exception(self) -> None:
        assert self.exclude(["**", "!bar", "!foo/bar"]) == convert_paths(
            set(
                [
                    "foo/bar",
                    "foo/bar/a.py",
                    "bar",
                    "bar/a.py",
                    "Dockerfile",
                    ".dockerignore",
                ]
            )
        )

    def test_include_wildcard(self) -> None:
        # This may be surprising but it matches the CLI's behavior
        # (tested with 18.05.0-ce on linux)
        base = make_tree(["a"], ["a/b.py"])
        assert exclude_paths(base, ["*", "!*/b.py"]) == set()

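    # Patterns are evaluated in order and the last match wins: the trailing
    # "README-secret.md" re-excludes a file that "!README*.md" had just
    # re-included.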
    def test_last_line_precedence(self) -> None:
        base = make_tree(
            [],
            [
                "garbage.md",
                "trash.md",
                "README.md",
                "README-bis.md",
                "README-secret.md",
            ],
        )
        assert exclude_paths(base, ["*.md", "!README*.md", "README-secret.md"]) == set(
            ["README.md", "README-bis.md"]
        )

    def test_parent_directory(self) -> None:
        base = make_tree([], ["a.py", "b.py", "c.py"])
        # Dockerignore reference stipulates that absolute paths are
        # equivalent to relative paths, hence /../foo should be
        # equivalent to ../foo. It also stipulates that paths are run
        # through Go's filepath.Clean, which explicitly "replace
        # "/.." by "/" at the beginning of a path".
        assert exclude_paths(base, ["../a.py", "/../b.py"]) == set(["c.py"])


class TarTest(unittest.TestCase):
    def test_tar_with_excludes(self) -> None:
        dirs = [
            "foo",
            "foo/bar",
            "bar",
        ]

        files = [
            "Dockerfile",
            "Dockerfile.alt",
            ".dockerignore",
            "a.py",
            "a.go",
            "b.py",
            "cde.py",
            "foo/a.py",
            "foo/b.py",
            "foo/bar/a.py",
            "bar/a.py",
        ]

        exclude = [
            "*.py",
            "!b.py",
            "!a.go",
            "foo",
            "Dockerfile*",
            ".dockerignore",
        ]

        expected_names = set(
            [
                "Dockerfile",
                ".dockerignore",
                "a.go",
                "b.py",
                "bar",
                "bar/a.py",
            ]
        )

        base = make_tree(dirs, files)
        self.addCleanup(shutil.rmtree, base)

        with tar(base, exclude=exclude) as archive, tarfile.open(
            fileobj=archive
        ) as tar_data:
            assert sorted(tar_data.getnames()) == sorted(expected_names)

    def test_tar_with_empty_directory(self) -> None:
        base = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, base)
        for d in ["foo", "bar"]:
            os.makedirs(os.path.join(base, d))
        with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
            assert sorted(tar_data.getnames()) == ["bar", "foo"]

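    # Mode 0o222 below makes the file write-only, so reading it while
    # building the tar context is expected to fail with an IOError.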
    @pytest.mark.skipif(
        IS_WINDOWS_PLATFORM or os.geteuid() == 0,
        reason="root user always has access ; no chmod on Windows",
    )
    def test_tar_with_inaccessible_file(self) -> None:
        base = tempfile.mkdtemp()
        full_path = os.path.join(base, "foo")
        self.addCleanup(shutil.rmtree, base)
        with open(full_path, "wt", encoding="utf-8") as f:
            f.write("content")
        os.chmod(full_path, 0o222)
        with pytest.raises(IOError) as ei:
            tar(base)

        assert f"Can not read file in context: {full_path}" in ei.exconly()

    @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows")
    def test_tar_with_file_symlinks(self) -> None:
        base = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, base)
        with open(os.path.join(base, "foo"), "wt", encoding="utf-8") as f:
            f.write("content")
        os.makedirs(os.path.join(base, "bar"))
        os.symlink("../foo", os.path.join(base, "bar/foo"))
        with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
            assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"]

    @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows")
    def test_tar_with_directory_symlinks(self) -> None:
        base = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, base)
        for d in ["foo", "bar"]:
            os.makedirs(os.path.join(base, d))
        os.symlink("../foo", os.path.join(base, "bar/foo"))
        with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
            assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"]

    @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows")
    def test_tar_with_broken_symlinks(self) -> None:
        base = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, base)
        for d in ["foo", "bar"]:
            os.makedirs(os.path.join(base, d))

        os.symlink("../baz", os.path.join(base, "bar/foo"))
        with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
            assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"]

    @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No UNIX sockets on Win32")
    def test_tar_socket_file(self) -> None:
        base = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, base)
        for d in ["foo", "bar"]:
            os.makedirs(os.path.join(base, d))
        sock = socket.socket(socket.AF_UNIX)
        self.addCleanup(sock.close)
        sock.bind(os.path.join(base, "test.sock"))
        with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
            assert sorted(tar_data.getnames()) == ["bar", "foo"]

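    # Regression test: a file whose mtime is negative (before the Unix
    # epoch) must round-trip through the tar archive unchanged.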
    def test_tar_negative_mtime_bug(self) -> None:
        base = tempfile.mkdtemp()
        filename = os.path.join(base, "th.txt")
        self.addCleanup(shutil.rmtree, base)
        with open(filename, "wt", encoding="utf-8") as f:
            f.write("Invisible Full Moon")
        os.utime(filename, (12345, -3600.0))
        with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
            assert tar_data.getnames() == ["th.txt"]
            assert tar_data.getmember("th.txt").mtime == -3600

    @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows")
    def test_tar_directory_link(self) -> None:
        dirs = ["a", "b", "a/c"]
        files = ["a/hello.py", "b/utils.py", "a/c/descend.py"]
        base = make_tree(dirs, files)
        self.addCleanup(shutil.rmtree, base)
        os.symlink(os.path.join(base, "b"), os.path.join(base, "a/c/b"))
        with tar(base) as archive:
            with tarfile.open(fileobj=archive) as tar_data:
                names = tar_data.getnames()
                for member in dirs + files:
                    assert member in names
                assert "a/c/b" in names
                assert "a/c/b/utils.py" not in names

@@ -0,0 +1,122 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import json
import os
import shutil
import tempfile
import typing as t
import unittest
from collections.abc import Callable
from unittest import mock

from pytest import fixture, mark

from ansible_collections.community.docker.plugins.module_utils._api.utils import config

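# The tests below cover find_config_file()'s lookup order: an explicitly
# passed path, the DOCKER_CONFIG environment variable, ~/.docker/config.json,
# and finally the legacy ~/.dockercfg.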
class FindConfigFileTest(unittest.TestCase):
    mkdir: Callable[[str], os.PathLike[str]]

    @fixture(autouse=True)
    def tmpdir(self, tmpdir: t.Any) -> None:
        self.mkdir = tmpdir.mkdir

    def test_find_config_fallback(self) -> None:
        tmpdir = self.mkdir("test_find_config_fallback")

        with mock.patch.dict(os.environ, {"HOME": str(tmpdir)}):
            assert config.find_config_file() is None

    def test_find_config_from_explicit_path(self) -> None:
        tmpdir = self.mkdir("test_find_config_from_explicit_path")
        config_path = tmpdir.ensure("my-config-file.json")  # type: ignore[attr-defined]

        assert config.find_config_file(str(config_path)) == str(config_path)

    def test_find_config_from_environment(self) -> None:
        tmpdir = self.mkdir("test_find_config_from_environment")
        config_path = tmpdir.ensure("config.json")  # type: ignore[attr-defined]

        with mock.patch.dict(os.environ, {"DOCKER_CONFIG": str(tmpdir)}):
            assert config.find_config_file() == str(config_path)

    @mark.skipif("sys.platform == 'win32'")
    def test_find_config_from_home_posix(self) -> None:
        tmpdir = self.mkdir("test_find_config_from_home_posix")
        config_path = tmpdir.ensure(".docker", "config.json")  # type: ignore[attr-defined]

        with mock.patch.dict(os.environ, {"HOME": str(tmpdir)}):
            assert config.find_config_file() == str(config_path)

    @mark.skipif("sys.platform == 'win32'")
    def test_find_config_from_home_legacy_name(self) -> None:
        tmpdir = self.mkdir("test_find_config_from_home_legacy_name")
        config_path = tmpdir.ensure(".dockercfg")  # type: ignore[attr-defined]

        with mock.patch.dict(os.environ, {"HOME": str(tmpdir)}):
            assert config.find_config_file() == str(config_path)

    @mark.skipif("sys.platform != 'win32'")
    def test_find_config_from_home_windows(self) -> None:
        tmpdir = self.mkdir("test_find_config_from_home_windows")
        config_path = tmpdir.ensure(".docker", "config.json")  # type: ignore[attr-defined]

        with mock.patch.dict(os.environ, {"USERPROFILE": str(tmpdir)}):
            assert config.find_config_file() == str(config_path)


class LoadConfigTest(unittest.TestCase):
    def test_load_config_no_file(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)
        cfg = config.load_general_config(folder)
        assert cfg is not None
        assert isinstance(cfg, dict)
        assert not cfg

    def test_load_config_custom_headers(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)

        dockercfg_path = os.path.join(folder, "config.json")
        config_data = {
            "HttpHeaders": {"Name": "Spike", "Surname": "Spiegel"},
        }

        with open(dockercfg_path, "wt", encoding="utf-8") as f:
            json.dump(config_data, f)

        cfg = config.load_general_config(dockercfg_path)
        assert "HttpHeaders" in cfg
        assert cfg["HttpHeaders"] == {"Name": "Spike", "Surname": "Spiegel"}

    def test_load_config_detach_keys(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)
        dockercfg_path = os.path.join(folder, "config.json")
        config_data = {"detachKeys": "ctrl-q, ctrl-u, ctrl-i"}
        with open(dockercfg_path, "wt", encoding="utf-8") as f:
            json.dump(config_data, f)

        cfg = config.load_general_config(dockercfg_path)
        assert cfg == config_data

    def test_load_config_from_env(self) -> None:
        folder = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, folder)
        dockercfg_path = os.path.join(folder, "config.json")
        config_data = {"detachKeys": "ctrl-q, ctrl-u, ctrl-i"}
        with open(dockercfg_path, "wt", encoding="utf-8") as f:
            json.dump(config_data, f)

        with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}):
            cfg = config.load_general_config(None)
            assert cfg == config_data

@@ -0,0 +1,51 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import typing as t
import unittest

from ansible_collections.community.docker.plugins.module_utils._api.api.client import (
    APIClient,
)
from ansible_collections.community.docker.plugins.module_utils._api.utils.decorators import (
    update_headers,
)
from ansible_collections.community.docker.tests.unit.plugins.module_utils._api.constants import (
    DEFAULT_DOCKER_API_VERSION,
)

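# update_headers() wraps an APIClient method so that any HttpHeaders from the
# client's general config get merged into the headers passed to the request;
# the test below exercises the missing, empty, and populated cases.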
class DecoratorsTest(unittest.TestCase):
    def test_update_headers(self) -> None:
        sample_headers = {
            "X-Docker-Locale": "en-US",
        }

        def f(self: t.Any, headers: t.Any = None) -> t.Any:
            return headers

        client = APIClient(version=DEFAULT_DOCKER_API_VERSION)
        client._general_configs = {}

        g = update_headers(f)
        assert g(client, headers=None) is None
        assert g(client, headers={}) == {}
        assert g(client, headers={"Content-type": "application/json"}) == {
            "Content-type": "application/json",
        }

        client._general_configs = {"HttpHeaders": sample_headers}

        assert g(client, headers=None) == sample_headers
        assert g(client, headers={}) == sample_headers
        assert g(client, headers={"Content-type": "application/json"}) == {
            "Content-type": "application/json",
            "X-Docker-Locale": "en-US",
        }

@@ -0,0 +1,74 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import typing as t

from ansible_collections.community.docker.plugins.module_utils._api.utils.json_stream import (
    json_splitter,
    json_stream,
    stream_as_text,
)

if t.TYPE_CHECKING:
    T = t.TypeVar("T")


def create_generator(input_sequence: list[T]) -> t.Generator[T]:
    yield from input_sequence

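# json_splitter() takes a text buffer and returns either a tuple of (first
# decoded JSON object, remaining text) or None if no complete object can be
# decoded yet; json_stream() applies it repeatedly across a chunked stream.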
class TestJsonSplitter:
    def test_json_splitter_no_object(self) -> None:
        data = '{"foo": "bar'
        assert json_splitter(data) is None

    def test_json_splitter_with_object(self) -> None:
        data = '{"foo": "bar"}\n \n{"next": "obj"}'
        assert json_splitter(data) == ({"foo": "bar"}, '{"next": "obj"}')

    def test_json_splitter_leading_whitespace(self) -> None:
        data = '\n \r{"foo": "bar"}\n\n {"next": "obj"}'
        assert json_splitter(data) == ({"foo": "bar"}, '{"next": "obj"}')


class TestStreamAsText:
    def test_stream_with_non_utf_unicode_character(self) -> None:
        stream = create_generator([b"\xed\xf3\xf3"])
        (output,) = stream_as_text(stream)
        assert output == "\ufffd\ufffd\ufffd"

    def test_stream_with_utf_character(self) -> None:
        stream = create_generator(["ěĝ".encode("utf-8")])
        (output,) = stream_as_text(stream)
        assert output == "ěĝ"


class TestJsonStream:
    def test_with_falsy_entries(self) -> None:
        stream = create_generator(
            [
                '{"one": "two"}\n{}\n',
                "[1, 2, 3]\n[]\n",
            ]
        )
        output = list(json_stream(stream))
        assert output == [
            {"one": "two"},
            {},
            [1, 2, 3],
            [],
        ]

    def test_with_leading_whitespace(self) -> None:
        stream = create_generator(
            ['\n \r\n  {"one": "two"}{"x": 1}', '  {"three": "four"}\t\t{"x": 2}']
        )
        output = list(json_stream(stream))
        assert output == [{"one": "two"}, {"x": 1}, {"three": "four"}, {"x": 2}]

@@ -0,0 +1,152 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import unittest

import pytest

from ansible_collections.community.docker.plugins.module_utils._api.utils.ports import (
    build_port_bindings,
    split_port,
)

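# split_port() parses Docker port specifications of the form
# [host_ip:][host_port:]container_port(-range)(/protocol) and returns a
# tuple of (internal ports, external bindings).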
class PortsTest(unittest.TestCase):
    def test_split_port_with_host_ip(self) -> None:
        internal_port, external_port = split_port("127.0.0.1:1000:2000")
        assert internal_port == ["2000"]
        assert external_port == [("127.0.0.1", "1000")]

    def test_split_port_with_protocol(self) -> None:
        for protocol in ["tcp", "udp", "sctp"]:
            internal_port, external_port = split_port("127.0.0.1:1000:2000/" + protocol)
            assert internal_port == ["2000/" + protocol]
            assert external_port == [("127.0.0.1", "1000")]

    def test_split_port_with_host_ip_no_port(self) -> None:
        internal_port, external_port = split_port("127.0.0.1::2000")
        assert internal_port == ["2000"]
        assert external_port == [("127.0.0.1", None)]

    def test_split_port_range_with_host_ip_no_port(self) -> None:
        internal_port, external_port = split_port("127.0.0.1::2000-2001")
        assert internal_port == ["2000", "2001"]
        assert external_port == [("127.0.0.1", None), ("127.0.0.1", None)]

    def test_split_port_with_host_port(self) -> None:
        internal_port, external_port = split_port("1000:2000")
        assert internal_port == ["2000"]
        assert external_port == ["1000"]

    def test_split_port_range_with_host_port(self) -> None:
        internal_port, external_port = split_port("1000-1001:2000-2001")
        assert internal_port == ["2000", "2001"]
        assert external_port == ["1000", "1001"]

    def test_split_port_random_port_range_with_host_port(self) -> None:
        internal_port, external_port = split_port("1000-1001:2000")
        assert internal_port == ["2000"]
        assert external_port == ["1000-1001"]

    def test_split_port_no_host_port(self) -> None:
        internal_port, external_port = split_port("2000")
        assert internal_port == ["2000"]
        assert external_port is None

    def test_split_port_range_no_host_port(self) -> None:
        internal_port, external_port = split_port("2000-2001")
        assert internal_port == ["2000", "2001"]
        assert external_port is None

    def test_split_port_range_with_protocol(self) -> None:
        internal_port, external_port = split_port("127.0.0.1:1000-1001:2000-2001/udp")
        assert internal_port == ["2000/udp", "2001/udp"]
        assert external_port == [("127.0.0.1", "1000"), ("127.0.0.1", "1001")]

    def test_split_port_with_ipv6_address(self) -> None:
        internal_port, external_port = split_port("2001:abcd:ef00::2:1000:2000")
        assert internal_port == ["2000"]
        assert external_port == [("2001:abcd:ef00::2", "1000")]

    def test_split_port_with_ipv6_square_brackets_address(self) -> None:
        internal_port, external_port = split_port("[2001:abcd:ef00::2]:1000:2000")
        assert internal_port == ["2000"]
        assert external_port == [("2001:abcd:ef00::2", "1000")]

    def test_split_port_invalid(self) -> None:
        with pytest.raises(ValueError):
            split_port("0.0.0.0:1000:2000:tcp")

    def test_split_port_invalid_protocol(self) -> None:
        with pytest.raises(ValueError):
            split_port("0.0.0.0:1000:2000/ftp")

    def test_non_matching_length_port_ranges(self) -> None:
        with pytest.raises(ValueError):
            split_port("0.0.0.0:1000-1010:2000-2002/tcp")

    def test_port_and_range_invalid(self) -> None:
        with pytest.raises(ValueError):
            split_port("0.0.0.0:1000:2000-2002/tcp")

    def test_port_only_with_colon(self) -> None:
        with pytest.raises(ValueError):
            split_port(":80")

    def test_host_only_with_colon(self) -> None:
        with pytest.raises(ValueError):
            split_port("localhost:")

    def test_with_no_container_port(self) -> None:
        with pytest.raises(ValueError):
            split_port("localhost:80:")

    def test_split_port_empty_string(self) -> None:
        with pytest.raises(ValueError):
            split_port("")

    def test_split_port_non_string(self) -> None:
        assert split_port(1243) == (["1243"], None)

    def test_build_port_bindings_with_one_port(self) -> None:
        port_bindings = build_port_bindings(["127.0.0.1:1000:1000"])
        assert port_bindings["1000"] == [("127.0.0.1", "1000")]

    def test_build_port_bindings_with_matching_internal_ports(self) -> None:
        port_bindings = build_port_bindings(
            ["127.0.0.1:1000:1000", "127.0.0.1:2000:1000"]
        )
        assert port_bindings["1000"] == [("127.0.0.1", "1000"), ("127.0.0.1", "2000")]

    def test_build_port_bindings_with_nonmatching_internal_ports(self) -> None:
        port_bindings = build_port_bindings(
            ["127.0.0.1:1000:1000", "127.0.0.1:2000:2000"]
        )
        assert port_bindings["1000"] == [("127.0.0.1", "1000")]
        assert port_bindings["2000"] == [("127.0.0.1", "2000")]

    def test_build_port_bindings_with_port_range(self) -> None:
        port_bindings = build_port_bindings(["127.0.0.1:1000-1001:1000-1001"])
        assert port_bindings["1000"] == [("127.0.0.1", "1000")]
        assert port_bindings["1001"] == [("127.0.0.1", "1001")]

    def test_build_port_bindings_with_matching_internal_port_ranges(self) -> None:
        port_bindings = build_port_bindings(
            ["127.0.0.1:1000-1001:1000-1001", "127.0.0.1:2000-2001:1000-1001"]
        )
        assert port_bindings["1000"] == [("127.0.0.1", "1000"), ("127.0.0.1", "2000")]
        assert port_bindings["1001"] == [("127.0.0.1", "1001"), ("127.0.0.1", "2001")]

    def test_build_port_bindings_with_nonmatching_internal_port_ranges(self) -> None:
        port_bindings = build_port_bindings(
            ["127.0.0.1:1000-1001:1000-1001", "127.0.0.1:2000-2001:2000-2001"]
        )
        assert port_bindings["1000"] == [("127.0.0.1", "1000")]
        assert port_bindings["1001"] == [("127.0.0.1", "1001")]
        assert port_bindings["2000"] == [("127.0.0.1", "2000")]
        assert port_bindings["2001"] == [("127.0.0.1", "2001")]

@@ -0,0 +1,97 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import unittest

from ansible_collections.community.docker.plugins.module_utils._api.utils.proxy import (
    ProxyConfig,
)

HTTP = "http://test:80"
HTTPS = "https://test:443"
FTP = "ftp://user:password@host:23"
NO_PROXY = "localhost,.localdomain"
CONFIG = ProxyConfig(http=HTTP, https=HTTPS, ftp=FTP, no_proxy=NO_PROXY)
ENV = {
    "http_proxy": HTTP,
    "HTTP_PROXY": HTTP,
    "https_proxy": HTTPS,
    "HTTPS_PROXY": HTTPS,
    "ftp_proxy": FTP,
    "FTP_PROXY": FTP,
    "no_proxy": NO_PROXY,
    "NO_PROXY": NO_PROXY,
}

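# get_environment() emits every proxy setting in both lower-case and
# upper-case form (http_proxy/HTTP_PROXY, ...), which is why ENV above
# lists each variable twice.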
class ProxyConfigTest(unittest.TestCase):
    def test_from_dict(self) -> None:
        config = ProxyConfig.from_dict(
            {
                "httpProxy": HTTP,
                "httpsProxy": HTTPS,
                "ftpProxy": FTP,
                "noProxy": NO_PROXY,
            }
        )
        self.assertEqual(CONFIG.http, config.http)
        self.assertEqual(CONFIG.https, config.https)
        self.assertEqual(CONFIG.ftp, config.ftp)
        self.assertEqual(CONFIG.no_proxy, config.no_proxy)

    def test_new(self) -> None:
        config = ProxyConfig()
        self.assertIsNone(config.http)
        self.assertIsNone(config.https)
        self.assertIsNone(config.ftp)
        self.assertIsNone(config.no_proxy)

        config = ProxyConfig(http="a", https="b", ftp="c", no_proxy="d")
        self.assertEqual(config.http, "a")
        self.assertEqual(config.https, "b")
        self.assertEqual(config.ftp, "c")
        self.assertEqual(config.no_proxy, "d")

    def test_truthiness(self) -> None:
        assert not ProxyConfig()
        assert ProxyConfig(http="non-zero")
        assert ProxyConfig(https="non-zero")
        assert ProxyConfig(ftp="non-zero")
        assert ProxyConfig(no_proxy="non-zero")

    def test_environment(self) -> None:
        self.assertDictEqual(CONFIG.get_environment(), ENV)
        empty = ProxyConfig()
        self.assertDictEqual(empty.get_environment(), {})

    def test_inject_proxy_environment(self) -> None:
        # Proxy config is non null, env is None.
        envlist = CONFIG.inject_proxy_environment(None)
        assert envlist is not None
        self.assertSetEqual(
            set(envlist),
            set(f"{k}={v}" for k, v in ENV.items()),
        )

        # Proxy config is null, env is None.
        self.assertIsNone(ProxyConfig().inject_proxy_environment(None))

        env = ["FOO=BAR", "BAR=BAZ"]

        # Proxy config is non null, env is non null.
        actual = CONFIG.inject_proxy_environment(env)
        expected = [f"{k}={v}" for k, v in ENV.items()] + env
        # It's important that the first 8 variables are the ones from the proxy
        # config, and the last 2 are the ones from the input environment.
        self.assertSetEqual(set(actual[:8]), set(expected[:8]))
        self.assertSetEqual(set(actual[-2:]), set(expected[-2:]))

        # Proxy config is null, env is non null.
        self.assertListEqual(ProxyConfig().inject_proxy_environment(env), env)

@@ -0,0 +1,481 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

import base64
import json
import os
import os.path
import shutil
import tempfile
import unittest

import pytest

from ansible_collections.community.docker.plugins.module_utils._api.api.client import (
    APIClient,
)
from ansible_collections.community.docker.plugins.module_utils._api.constants import (
    IS_WINDOWS_PLATFORM,
)
from ansible_collections.community.docker.plugins.module_utils._api.errors import (
    DockerException,
)
from ansible_collections.community.docker.plugins.module_utils._api.utils.utils import (
    convert_filters,
    convert_volume_binds,
    decode_json_header,
    format_environment,
    kwargs_from_env,
    parse_bytes,
    parse_devices,
    parse_env_file,
    parse_host,
    parse_repository_tag,
    split_command,
)
from ansible_collections.community.docker.tests.unit.plugins.module_utils._api.constants import (
    DEFAULT_DOCKER_API_VERSION,
)

TEST_CERT_DIR = os.path.join(
    os.path.dirname(__file__),
    "testdata/certs",
)

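# kwargs_from_env() maps DOCKER_HOST to base_url and derives the TLS
# configuration from DOCKER_CERT_PATH and DOCKER_TLS_VERIFY; setUp/tearDown
# snapshot and restore os.environ so each test can mutate it freely.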
class KwargsFromEnvTest(unittest.TestCase):
    os_environ: dict[str, str]

    def setUp(self) -> None:
        self.os_environ = os.environ.copy()

    def tearDown(self) -> None:
        for k, v in self.os_environ.items():
            if os.environ.get(k) != v:
                os.environ[k] = v
        # Iterate over a copy, since popping while iterating over os.environ
        # directly would raise a RuntimeError.
        for k in list(os.environ):
            if k not in self.os_environ:
                os.environ.pop(k)

    def test_kwargs_from_env_empty(self) -> None:
        os.environ.update(DOCKER_HOST="", DOCKER_CERT_PATH="")
        os.environ.pop("DOCKER_TLS_VERIFY", None)

        kwargs = kwargs_from_env()
        assert kwargs.get("base_url") is None
        assert kwargs.get("tls") is None

    def test_kwargs_from_env_tls(self) -> None:
        os.environ.update(
            DOCKER_HOST="tcp://192.168.59.103:2376",
            DOCKER_CERT_PATH=TEST_CERT_DIR,
            DOCKER_TLS_VERIFY="1",
        )
        kwargs = kwargs_from_env(assert_hostname=False)
        assert kwargs["base_url"] == "tcp://192.168.59.103:2376"
        assert "ca.pem" in kwargs["tls"].ca_cert
        assert "cert.pem" in kwargs["tls"].cert[0]
        assert "key.pem" in kwargs["tls"].cert[1]
        assert kwargs["tls"].assert_hostname is False
        assert kwargs["tls"].verify

        parsed_host = parse_host(kwargs["base_url"], IS_WINDOWS_PLATFORM, True)
        kwargs["version"] = DEFAULT_DOCKER_API_VERSION
        try:
            client = APIClient(**kwargs)
            assert parsed_host == client.base_url
            assert kwargs["tls"].ca_cert == client.verify
            assert kwargs["tls"].cert == client.cert
        except TypeError as e:
            self.fail(e)

    def test_kwargs_from_env_tls_verify_false(self) -> None:
        os.environ.update(
            DOCKER_HOST="tcp://192.168.59.103:2376",
            DOCKER_CERT_PATH=TEST_CERT_DIR,
            DOCKER_TLS_VERIFY="",
        )
        kwargs = kwargs_from_env(assert_hostname=True)
        assert kwargs["base_url"] == "tcp://192.168.59.103:2376"
        assert "ca.pem" in kwargs["tls"].ca_cert
        assert "cert.pem" in kwargs["tls"].cert[0]
        assert "key.pem" in kwargs["tls"].cert[1]
        assert kwargs["tls"].assert_hostname is True
        assert kwargs["tls"].verify is False
        parsed_host = parse_host(kwargs["base_url"], IS_WINDOWS_PLATFORM, True)
        kwargs["version"] = DEFAULT_DOCKER_API_VERSION
        try:
            client = APIClient(**kwargs)
            assert parsed_host == client.base_url
            assert kwargs["tls"].cert == client.cert
            assert not kwargs["tls"].verify
        except TypeError as e:
            self.fail(e)

    def test_kwargs_from_env_tls_verify_false_no_cert(self) -> None:
        temp_dir = tempfile.mkdtemp()
        cert_dir = os.path.join(temp_dir, ".docker")
        shutil.copytree(TEST_CERT_DIR, cert_dir)

        os.environ.update(
            DOCKER_HOST="tcp://192.168.59.103:2376", HOME=temp_dir, DOCKER_TLS_VERIFY=""
        )
        os.environ.pop("DOCKER_CERT_PATH", None)
        kwargs = kwargs_from_env(assert_hostname=True)
        assert kwargs["base_url"] == "tcp://192.168.59.103:2376"

    def test_kwargs_from_env_no_cert_path(self) -> None:
        # Initialize before the try block so the finally clause cannot hit a
        # NameError if mkdtemp() itself fails.
        temp_dir = None
        try:
            temp_dir = tempfile.mkdtemp()
            cert_dir = os.path.join(temp_dir, ".docker")
            shutil.copytree(TEST_CERT_DIR, cert_dir)

            os.environ.update(HOME=temp_dir, DOCKER_CERT_PATH="", DOCKER_TLS_VERIFY="1")

            kwargs = kwargs_from_env()
            assert kwargs["tls"].verify
            assert cert_dir in kwargs["tls"].ca_cert
            assert cert_dir in kwargs["tls"].cert[0]
            assert cert_dir in kwargs["tls"].cert[1]
        finally:
            if temp_dir:
                shutil.rmtree(temp_dir)

    def test_kwargs_from_env_alternate_env(self) -> None:
        # Values in os.environ are entirely ignored if an alternate is
        # provided
        os.environ.update(
            DOCKER_HOST="tcp://192.168.59.103:2376",
            DOCKER_CERT_PATH=TEST_CERT_DIR,
            DOCKER_TLS_VERIFY="",
        )
        kwargs = kwargs_from_env(
            environment={
                "DOCKER_HOST": "http://docker.gensokyo.jp:2581",
            }
        )
        assert kwargs["base_url"] == "http://docker.gensokyo.jp:2581"
        assert "tls" not in kwargs


class ConvertVolumeBindsTest(unittest.TestCase):
    def test_convert_volume_binds_empty(self) -> None:
        assert convert_volume_binds({}) == []
        assert convert_volume_binds([]) == []

    def test_convert_volume_binds_list(self) -> None:
        data = ["/a:/a:ro", "/b:/c:z"]
        assert convert_volume_binds(data) == data

    def test_convert_volume_binds_complete(self) -> None:
        data: dict[str | bytes, dict[str, str]] = {
            "/mnt/vol1": {"bind": "/data", "mode": "ro"}
        }
        assert convert_volume_binds(data) == ["/mnt/vol1:/data:ro"]

    def test_convert_volume_binds_compact(self) -> None:
        data: dict[str | bytes, str] = {"/mnt/vol1": "/data"}
        assert convert_volume_binds(data) == ["/mnt/vol1:/data:rw"]

    def test_convert_volume_binds_no_mode(self) -> None:
        data: dict[str | bytes, dict[str, str]] = {"/mnt/vol1": {"bind": "/data"}}
        assert convert_volume_binds(data) == ["/mnt/vol1:/data:rw"]

    def test_convert_volume_binds_unicode_bytes_input(self) -> None:
        expected = ["/mnt/지연:/unicode/박:rw"]

        data: dict[str | bytes, dict[str, str | bytes]] = {
            "/mnt/지연".encode("utf-8"): {
                "bind": "/unicode/박".encode("utf-8"),
                "mode": "rw",
            }
        }
        assert convert_volume_binds(data) == expected

    def test_convert_volume_binds_unicode_unicode_input(self) -> None:
        expected = ["/mnt/지연:/unicode/박:rw"]

        data: dict[str | bytes, dict[str, str]] = {
            "/mnt/지연": {"bind": "/unicode/박", "mode": "rw"}
        }
        assert convert_volume_binds(data) == expected


class ParseEnvFileTest(unittest.TestCase):
    def generate_tempfile(self, file_content: str) -> str:
        """
        Generates a temporary file for tests with the content
        of 'file_content' and returns the filename.
        Don't forget to unlink the file with os.unlink() after.
        """
        with tempfile.NamedTemporaryFile(delete=False) as local_tempfile:
            local_tempfile.write(file_content.encode("UTF-8"))
        return local_tempfile.name

    def test_parse_env_file_proper(self) -> None:
        env_file = self.generate_tempfile(file_content="USER=jdoe\nPASS=secret")
        get_parse_env_file = parse_env_file(env_file)
        assert get_parse_env_file == {"USER": "jdoe", "PASS": "secret"}
        os.unlink(env_file)

    def test_parse_env_file_with_equals_character(self) -> None:
        env_file = self.generate_tempfile(file_content="USER=jdoe\nPASS=sec==ret")
        get_parse_env_file = parse_env_file(env_file)
        assert get_parse_env_file == {"USER": "jdoe", "PASS": "sec==ret"}
        os.unlink(env_file)

    def test_parse_env_file_commented_line(self) -> None:
        env_file = self.generate_tempfile(file_content="USER=jdoe\n#PASS=secret")
        get_parse_env_file = parse_env_file(env_file)
        assert get_parse_env_file == {"USER": "jdoe"}
        os.unlink(env_file)

    def test_parse_env_file_newline(self) -> None:
        env_file = self.generate_tempfile(file_content="\nUSER=jdoe\n\n\nPASS=secret")
        get_parse_env_file = parse_env_file(env_file)
        assert get_parse_env_file == {"USER": "jdoe", "PASS": "secret"}
        os.unlink(env_file)

    def test_parse_env_file_invalid_line(self) -> None:
        env_file = self.generate_tempfile(file_content="USER jdoe")
        with pytest.raises(DockerException):
            parse_env_file(env_file)
        os.unlink(env_file)


class ParseHostTest(unittest.TestCase):
    def test_parse_host(self) -> None:
        invalid_hosts = [
            "foo://0.0.0.0",
            "tcp://",
            "udp://127.0.0.1",
            "udp://127.0.0.1:2375",
            "ssh://:22/path",
            "tcp://netloc:3333/path?q=1",
            "unix:///sock/path#fragment",
            "https://netloc:3333/path;params",
            "ssh://:clearpassword@host:22",
        ]

        valid_hosts = {
            "0.0.0.1:5555": "http://0.0.0.1:5555",
            ":6666": "http://127.0.0.1:6666",
            "tcp://:7777": "http://127.0.0.1:7777",
            "http://:7777": "http://127.0.0.1:7777",
            "https://kokia.jp:2375": "https://kokia.jp:2375",
            "unix:///var/run/docker.sock": "http+unix:///var/run/docker.sock",
            "unix://": "http+unix:///var/run/docker.sock",
            "12.234.45.127:2375/docker/engine": (
                "http://12.234.45.127:2375/docker/engine"
            ),
            "somehost.net:80/service/swarm": ("http://somehost.net:80/service/swarm"),
            "npipe:////./pipe/docker_engine": "npipe:////./pipe/docker_engine",
            "[fd12::82d1]:2375": "http://[fd12::82d1]:2375",
            "https://[fd12:5672::12aa]:1090": "https://[fd12:5672::12aa]:1090",
            "[fd12::82d1]:2375/docker/engine": (
                "http://[fd12::82d1]:2375/docker/engine"
            ),
            "ssh://[fd12::82d1]": "ssh://[fd12::82d1]:22",
            "ssh://user@[fd12::82d1]:8765": "ssh://user@[fd12::82d1]:8765",
            "ssh://": "ssh://127.0.0.1:22",
            "ssh://user@localhost:22": "ssh://user@localhost:22",
            "ssh://user@remote": "ssh://user@remote:22",
        }

        for host in invalid_hosts:
            msg = f"Should have failed to parse invalid host: {host}"
            with self.assertRaises(DockerException, msg=msg):
                parse_host(host)

        for host, expected in valid_hosts.items():
            self.assertEqual(
                parse_host(host),
                expected,
                msg=f"Failed to parse valid host: {host}",
            )

    def test_parse_host_empty_value(self) -> None:
        unix_socket = "http+unix:///var/run/docker.sock"
        npipe = "npipe:////./pipe/docker_engine"

        for val in [None, ""]:
            assert parse_host(val, is_win32=False) == unix_socket
            assert parse_host(val, is_win32=True) == npipe

    def test_parse_host_tls(self) -> None:
        host_value = "myhost.docker.net:3348"
        expected_result = "https://myhost.docker.net:3348"
        assert parse_host(host_value, tls=True) == expected_result

    def test_parse_host_tls_tcp_proto(self) -> None:
        host_value = "tcp://myhost.docker.net:3348"
        expected_result = "https://myhost.docker.net:3348"
        assert parse_host(host_value, tls=True) == expected_result

    def test_parse_host_trailing_slash(self) -> None:
        host_value = "tcp://myhost.docker.net:2376/"
        expected_result = "http://myhost.docker.net:2376"
        assert parse_host(host_value) == expected_result


class ParseRepositoryTagTest(unittest.TestCase):
    sha = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"

    def test_index_image_no_tag(self) -> None:
        assert parse_repository_tag("root") == ("root", None)

    def test_index_image_tag(self) -> None:
        assert parse_repository_tag("root:tag") == ("root", "tag")

    def test_index_user_image_no_tag(self) -> None:
        assert parse_repository_tag("user/repo") == ("user/repo", None)

    def test_index_user_image_tag(self) -> None:
        assert parse_repository_tag("user/repo:tag") == ("user/repo", "tag")

    def test_private_reg_image_no_tag(self) -> None:
        assert parse_repository_tag("url:5000/repo") == ("url:5000/repo", None)

    def test_private_reg_image_tag(self) -> None:
        assert parse_repository_tag("url:5000/repo:tag") == ("url:5000/repo", "tag")

    def test_index_image_sha(self) -> None:
        assert parse_repository_tag(f"root@sha256:{self.sha}") == (
            "root",
            f"sha256:{self.sha}",
        )

    def test_private_reg_image_sha(self) -> None:
        assert parse_repository_tag(f"url:5000/repo@sha256:{self.sha}") == (
            "url:5000/repo",
            f"sha256:{self.sha}",
        )


class ParseDeviceTest(unittest.TestCase):
    def test_dict(self) -> None:
        devices = parse_devices(
            [
                {
                    "PathOnHost": "/dev/sda1",
                    "PathInContainer": "/dev/mnt1",
                    "CgroupPermissions": "r",
                }
            ]
        )
        assert devices[0] == {
            "PathOnHost": "/dev/sda1",
            "PathInContainer": "/dev/mnt1",
            "CgroupPermissions": "r",
        }

    def test_partial_string_definition(self) -> None:
        devices = parse_devices(["/dev/sda1"])
        assert devices[0] == {
            "PathOnHost": "/dev/sda1",
            "PathInContainer": "/dev/sda1",
            "CgroupPermissions": "rwm",
        }

    def test_permissionless_string_definition(self) -> None:
        devices = parse_devices(["/dev/sda1:/dev/mnt1"])
        assert devices[0] == {
            "PathOnHost": "/dev/sda1",
            "PathInContainer": "/dev/mnt1",
            "CgroupPermissions": "rwm",
        }

    def test_full_string_definition(self) -> None:
        devices = parse_devices(["/dev/sda1:/dev/mnt1:r"])
        assert devices[0] == {
            "PathOnHost": "/dev/sda1",
            "PathInContainer": "/dev/mnt1",
            "CgroupPermissions": "r",
        }

    def test_hybrid_list(self) -> None:
        devices = parse_devices(
            [
                "/dev/sda1:/dev/mnt1:rw",
                {
                    "PathOnHost": "/dev/sda2",
                    "PathInContainer": "/dev/mnt2",
                    "CgroupPermissions": "r",
                },
            ]
        )

        assert devices[0] == {
            "PathOnHost": "/dev/sda1",
            "PathInContainer": "/dev/mnt1",
            "CgroupPermissions": "rw",
        }
        assert devices[1] == {
            "PathOnHost": "/dev/sda2",
            "PathInContainer": "/dev/mnt2",
            "CgroupPermissions": "r",
        }


class ParseBytesTest(unittest.TestCase):
    def test_parse_bytes_valid(self) -> None:
        assert parse_bytes("512MB") == 536870912
        assert parse_bytes("512M") == 536870912
        assert parse_bytes("512m") == 536870912

    def test_parse_bytes_invalid(self) -> None:
        with pytest.raises(DockerException):
            parse_bytes("512MK")
        with pytest.raises(DockerException):
            parse_bytes("512L")
        with pytest.raises(DockerException):
            parse_bytes("127.0.0.1K")

    def test_parse_bytes_float(self) -> None:
        assert parse_bytes("1.5k") == 1536


class UtilsTest(unittest.TestCase):
    longMessage = True

    def test_convert_filters(self) -> None:
        tests: list[tuple[dict[str, bool | str | int | list[str | int]], str]] = [
            ({"dangling": True}, '{"dangling": ["true"]}'),
            ({"dangling": "true"}, '{"dangling": ["true"]}'),
            ({"exited": 0}, '{"exited": ["0"]}'),
            ({"exited": [0, 1]}, '{"exited": ["0", "1"]}'),
        ]

        for filters, expected in tests:
            assert convert_filters(filters) == expected

    def test_decode_json_header(self) -> None:
        obj = {"a": "b", "c": 1}
        data = base64.urlsafe_b64encode(bytes(json.dumps(obj), "utf-8"))
        decoded_data = decode_json_header(data)
        assert obj == decoded_data


class SplitCommandTest(unittest.TestCase):
    def test_split_command_with_unicode(self) -> None:
        assert split_command("echo μμ") == ["echo", "μμ"]


class FormatEnvironmentTest(unittest.TestCase):
    def test_format_env_binary_unicode_value(self) -> None:
        env_dict = {"ARTIST_NAME": b"\xec\x86\xa1\xec\xa7\x80\xec\x9d\x80"}
        assert format_environment(env_dict) == ["ARTIST_NAME=송지은"]

    def test_format_env_no_value(self) -> None:
        env_dict = {
            "FOO": None,
            "BAR": "",
        }
        assert sorted(format_environment(env_dict)) == ["BAR=", "FOO"]

@@ -0,0 +1,7 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

@@ -0,0 +1,7 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

@@ -0,0 +1,7 @@
# This code is part of the Ansible collection community.docker, but is an independent component.
# This particular file, and this file only, is based on the Docker SDK for Python (https://github.com/docker/docker-py/)
#
# Copyright (c) 2016-2022 Docker, Inc.
#
# It is licensed under the Apache 2.0 license (see LICENSES/Apache-2.0.txt in this collection)
# SPDX-License-Identifier: Apache-2.0

File diff suppressed because it is too large

@@ -0,0 +1,591 @@
# Copyright 2022 Red Hat | Ansible
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import annotations

import pytest

from ansible_collections.community.docker.plugins.module_utils._compose_v2 import (
    Event,
    parse_events,
    parse_json_events,
)

from .compose_v2_test_cases import EVENT_TEST_CASES

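# Each case is a tuple of (test id, Compose version, dry_run, nonzero_rc,
# captured stderr, expected events, expected warnings), matching the
# parametrize signature of test_parse_events() below.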
EXTRA_TEST_CASES: list[tuple[str, str, bool, bool, str, list[Event], list[str]]] = [
|
||||
(
|
||||
"2.24.2-manual-build-dry-run",
|
||||
"2.24.2",
|
||||
True,
|
||||
False,
|
||||
" DRY-RUN MODE - build service foobar \n"
|
||||
" DRY-RUN MODE - ==> ==> writing image dryRun-8843d7f92416211de9ebb963ff4ce28125932878 \n"
|
||||
" DRY-RUN MODE - ==> ==> naming to my-python \n"
|
||||
" DRY-RUN MODE - Network compose_default Creating\n"
|
||||
" DRY-RUN MODE - Network compose_default Created\n"
|
||||
" DRY-RUN MODE - Container compose-foobar-1 Creating\n"
|
||||
" DRY-RUN MODE - Container compose-foobar-1 Created\n"
|
||||
" DRY-RUN MODE - Container ompose-foobar-1 Starting\n"
|
||||
" DRY-RUN MODE - Container ompose-foobar-1 Started\n",
|
||||
[
|
||||
Event(
|
||||
"service",
|
||||
"foobar",
|
||||
"Building",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"network",
|
||||
"compose_default",
|
||||
"Creating",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"network",
|
||||
"compose_default",
|
||||
"Created",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"container",
|
||||
"compose-foobar-1",
|
||||
"Creating",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"container",
|
||||
"compose-foobar-1",
|
||||
"Created",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"container",
|
||||
"ompose-foobar-1",
|
||||
"Starting",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"container",
|
||||
"ompose-foobar-1",
|
||||
"Started",
|
||||
None,
|
||||
),
|
||||
],
|
||||
[],
|
||||
),
|
||||
(
|
||||
# https://github.com/ansible-collections/community.docker/issues/785
|
||||
"2.20.0-manual-pull",
|
||||
"2.20.0",
|
||||
False,
|
||||
False,
|
||||
"4f4fb700ef54 Waiting\n"
|
||||
"238022553356 Downloading 541B/541B\n"
|
||||
"972e292d3a60 Downloading 106kB/10.43MB\n"
|
||||
"f2543dc9f0a9 Downloading 25.36kB/2.425MB\n"
|
||||
"972e292d3a60 Downloading 5.925MB/10.43MB\n"
|
||||
"f2543dc9f0a9 Downloading 2.219MB/2.425MB\n"
|
||||
"f2543dc9f0a9 Extracting 32.77kB/2.425MB\n"
|
||||
"4f4fb700ef54 Downloading 32B/32B\n"
|
||||
"f2543dc9f0a9 Extracting 2.425MB/2.425MB\n"
|
||||
"972e292d3a60 Extracting 131.1kB/10.43MB\n"
|
||||
"972e292d3a60 Extracting 10.43MB/10.43MB\n"
|
||||
"238022553356 Extracting 541B/541B\n"
|
||||
"4f4fb700ef54 Extracting 32B/32B\n",
|
||||
[
|
||||
Event(
|
||||
"image-layer",
|
||||
"4f4fb700ef54",
|
||||
"Waiting",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"238022553356",
|
||||
"Downloading",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"972e292d3a60",
|
||||
"Downloading",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"f2543dc9f0a9",
|
||||
"Downloading",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"972e292d3a60",
|
||||
"Downloading",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"f2543dc9f0a9",
|
||||
"Downloading",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"f2543dc9f0a9",
|
||||
"Extracting",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"4f4fb700ef54",
|
||||
"Downloading",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"f2543dc9f0a9",
|
||||
"Extracting",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"972e292d3a60",
|
||||
"Extracting",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"972e292d3a60",
|
||||
"Extracting",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"238022553356",
|
||||
"Extracting",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"4f4fb700ef54",
|
||||
"Extracting",
|
||||
None,
|
||||
),
|
||||
],
|
||||
[],
|
||||
),
|
||||
(
|
||||
# https://github.com/ansible-collections/community.docker/issues/787
|
||||
"2.20.3-logrus-warn",
|
||||
"2.20.3",
|
||||
False,
|
||||
False,
|
||||
'time="2024-02-02T08:14:10+01:00" level=warning msg="a network with name influxNetwork exists but was not'
|
||||
' created for project \\"influxdb\\".\\nSet `external: true` to use an existing network"\n',
|
||||
[],
|
||||
[
|
||||
'a network with name influxNetwork exists but was not created for project "influxdb".\nSet `external: true` to use an existing network',
|
||||
],
|
||||
),
|
||||
(
|
||||
# https://github.com/ansible-collections/community.docker/issues/807
|
||||
"2.20.3-image-warning-error",
|
||||
"2.20.3",
|
||||
False,
|
||||
True,
|
||||
" dummy3 Warning \n"
|
||||
" dummy2 Warning \n"
|
||||
" dummy Error \n"
|
||||
" dummy4 Warning Foo bar \n"
|
||||
" dummy5 Error Bar baz bam \n",
|
||||
[
|
||||
Event(
|
||||
"unknown",
|
||||
"dummy",
|
||||
"Error",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"unknown",
|
||||
"dummy5",
|
||||
"Error",
|
||||
"Bar baz bam",
|
||||
),
|
||||
],
|
||||
[
|
||||
"Unspecified warning for dummy3",
|
||||
"Unspecified warning for dummy2",
|
||||
"dummy4: Foo bar",
|
||||
],
|
||||
),
|
||||
(
|
||||
# https://github.com/ansible-collections/community.docker/issues/911
|
||||
"2.28.1-image-pull-skipped",
|
||||
"2.28.1",
|
||||
False,
|
||||
False,
|
||||
# fmt: off
|
||||
" bash_1 Skipped \n bash_2 Pulling \n bash_2 Pulled \n",
|
||||
# fmt: on
|
||||
[
|
||||
Event(
|
||||
"unknown",
|
||||
"bash_1",
|
||||
"Skipped",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"service",
|
||||
"bash_2",
|
||||
"Pulling",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"service",
|
||||
"bash_2",
|
||||
"Pulled",
|
||||
None,
|
||||
),
|
||||
],
|
||||
[],
|
||||
),
|
||||
(
|
||||
# https://github.com/ansible-collections/community.docker/issues/948
|
||||
"2.28.1-unknown", # TODO: find out actual version!
|
||||
"2.28.1", # TODO: find out actual version!
|
||||
False,
|
||||
True,
|
||||
" prometheus Pulling \n"
|
||||
" prometheus Pulled \n"
|
||||
'network internet-monitoring-front-tier was found but has incorrect label com.docker.compose.network set to "internet-monitoring-front-tier"\n',
|
||||
[
|
||||
Event(
|
||||
"service",
|
||||
"prometheus",
|
||||
"Pulling",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"service",
|
||||
"prometheus",
|
||||
"Pulled",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"unknown",
|
||||
"",
|
||||
"Error",
|
||||
'network internet-monitoring-front-tier was found but has incorrect label com.docker.compose.network set to "internet-monitoring-front-tier"',
|
||||
),
|
||||
],
|
||||
[],
|
||||
),
|
||||
(
|
||||
# https://github.com/ansible-collections/community.docker/issues/978
|
||||
"2.28.1-unknown", # TODO: find out actual version!
|
||||
"2.28.1", # TODO: find out actual version!
|
||||
False,
|
||||
True,
|
||||
" Network create_users_db_default Creating\n"
|
||||
" Network create_users_db_default Created\n"
|
||||
" Container create_users_db-init Creating\n"
|
||||
" Container create_users_db-init Created\n"
|
||||
" Container create_users_db-init Starting\n"
|
||||
" Container create_users_db-init Started\n"
|
||||
" Container create_users_db-init Waiting\n"
|
||||
"container create_users_db-init exited (0)\n",
|
||||
[
|
||||
Event(
|
||||
"network",
|
||||
"create_users_db_default",
|
||||
"Creating",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"network",
|
||||
"create_users_db_default",
|
||||
"Created",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"container",
|
||||
"create_users_db-init",
|
||||
"Creating",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"container",
|
||||
"create_users_db-init",
|
||||
"Created",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"container",
|
||||
"create_users_db-init",
|
||||
"Starting",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"container",
|
||||
"create_users_db-init",
|
||||
"Started",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"container",
|
||||
"create_users_db-init",
|
||||
"Waiting",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"unknown",
|
||||
"",
|
||||
"Error",
|
||||
"container create_users_db-init exited (0)",
|
||||
),
|
||||
],
|
||||
[],
|
||||
),
|
||||
]
|
||||
|
||||
_ALL_TEST_CASES = EVENT_TEST_CASES + EXTRA_TEST_CASES
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"test_id, compose_version, dry_run, nonzero_rc, stderr, events, warnings",
|
||||
_ALL_TEST_CASES,
|
||||
ids=[tc[0] for tc in _ALL_TEST_CASES],
|
||||
)
|
||||
def test_parse_events(
|
||||
test_id: str,
|
||||
compose_version: str,
|
||||
dry_run: bool,
|
||||
nonzero_rc: bool,
|
||||
stderr: str,
|
||||
events: list[Event],
|
||||
warnings: list[str],
|
||||
) -> None:
|
||||
collected_warnings = []
|
||||
|
||||
def collect_warning(msg: str) -> None:
|
||||
collected_warnings.append(msg)
|
||||
|
||||
collected_events = parse_events(
|
||||
stderr.encode("utf-8"),
|
||||
dry_run=dry_run,
|
||||
warn_function=collect_warning,
|
||||
nonzero_rc=nonzero_rc,
|
||||
)
|
||||
|
||||
print(collected_events)
|
||||
print(collected_warnings)
|
||||
|
||||
assert collected_events == events
|
||||
assert collected_warnings == warnings
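# Illustrative only: feeding one line of compose plain-text stderr through the
# parser, with names and values taken from the test data above:
#
#     parse_events(
#         b"  bash_2 Pulling \n",
#         dry_run=False,
#         warn_function=print,
#         nonzero_rc=False,
#     )  # -> [Event("service", "bash_2", "Pulling", None)]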
|
||||
|
||||
|
||||
JSON_TEST_CASES: list[tuple[str, str, str, list[Event], list[str]]] = [
|
||||
(
|
||||
"pull-compose-2",
|
||||
"2.40.3",
|
||||
'{"level":"warning","msg":"/tmp/ansible.f9pcm_i3.test/ansible-docker-test-3c46cd06-pull/docker-compose.yml: the attribute `version`'
|
||||
' is obsolete, it will be ignored, please remove it to avoid potential confusion","time":"2025-12-06T13:16:30Z"}\n'
|
||||
'{"id":"ansible-docker-test-3c46cd06-cont","text":"Pulling"}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"ansible-docker-test-3c46cd06-cont","text":"Pulling fs layer"}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"ansible-docker-test-3c46cd06-cont","text":"Downloading","status":"[\\u003e '
|
||||
' ] 6.89kB/599.9kB","current":6890,"total":599883,"percent":1}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"ansible-docker-test-3c46cd06-cont","text":"Download complete","percent":100}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"ansible-docker-test-3c46cd06-cont","text":"Extracting","status":"[==\\u003e '
|
||||
' ] 32.77kB/599.9kB","current":32768,"total":599883,"percent":5}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"ansible-docker-test-3c46cd06-cont","text":"Extracting","status":"[============'
|
||||
'======================================\\u003e] 599.9kB/599.9kB","current":599883,"total":599883,"percent":100}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"ansible-docker-test-3c46cd06-cont","text":"Extracting","status":"[============'
|
||||
'======================================\\u003e] 599.9kB/599.9kB","current":599883,"total":599883,"percent":100}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"ansible-docker-test-3c46cd06-cont","text":"Pull complete","percent":100}\n'
|
||||
'{"id":"ansible-docker-test-3c46cd06-cont","text":"Pulled"}\n',
|
||||
[
|
||||
Event(
|
||||
"unknown",
|
||||
None,
|
||||
"Warning",
|
||||
"/tmp/ansible.f9pcm_i3.test/ansible-docker-test-3c46cd06-pull/docker-compose.yml: the attribute `version` is obsolete,"
|
||||
" it will be ignored, please remove it to avoid potential confusion",
|
||||
),
|
||||
Event(
|
||||
"image",
|
||||
"ansible-docker-test-3c46cd06-cont",
|
||||
"Pulling",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"63a26ae4e8a8",
|
||||
"Pulling fs layer",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"63a26ae4e8a8",
|
||||
"Downloading",
|
||||
"[> ] 6.89kB/599.9kB",
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"63a26ae4e8a8",
|
||||
"Download complete",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"63a26ae4e8a8",
|
||||
"Extracting",
|
||||
"[==> ] 32.77kB/599.9kB",
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"63a26ae4e8a8",
|
||||
"Extracting",
|
||||
"[==================================================>] 599.9kB/599.9kB",
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"63a26ae4e8a8",
|
||||
"Extracting",
|
||||
"[==================================================>] 599.9kB/599.9kB",
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"63a26ae4e8a8",
|
||||
"Pull complete",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image",
|
||||
"ansible-docker-test-3c46cd06-cont",
|
||||
"Pulled",
|
||||
None,
|
||||
),
|
||||
],
|
||||
[],
|
||||
),
|
||||
(
|
||||
"pull-compose-5",
|
||||
"5.0.0",
|
||||
'{"level":"warning","msg":"/tmp/ansible.1n0q46aj.test/ansible-docker-test-b2fa9191-pull/docker-compose.yml: the attribute'
|
||||
' `version` is obsolete, it will be ignored, please remove it to avoid potential confusion","time":"2025-12-06T13:08:22Z"}\n'
|
||||
'{"id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Working","text":"Pulling"}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Working"}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Working","text":"[\\u003e '
|
||||
' ] 6.89kB/599.9kB","current":6890,"total":599883,"percent":1}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Working","text":"[=============='
|
||||
'====================================\\u003e] 599.9kB/599.9kB","current":599883,"total":599883,"percent":100}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Working"}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Done","percent":100}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Working","text":"[==\\u003e '
|
||||
' ] 32.77kB/599.9kB","current":32768,"total":599883,"percent":5}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Working","text":"[=============='
|
||||
'====================================\\u003e] 599.9kB/599.9kB","current":599883,"total":599883,"percent":100}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Working","text":"[=============='
|
||||
'====================================\\u003e] 599.9kB/599.9kB","current":599883,"total":599883,"percent":100}\n'
|
||||
'{"id":"63a26ae4e8a8","parent_id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Done","percent":100}\n'
|
||||
'{"id":"Image ghcr.io/ansible-collections/simple-1:tag","status":"Done","text":"Pulled"}\n',
|
||||
[
|
||||
Event(
|
||||
"unknown",
|
||||
None,
|
||||
"Warning",
|
||||
"/tmp/ansible.1n0q46aj.test/ansible-docker-test-b2fa9191-pull/docker-compose.yml: the attribute `version`"
|
||||
" is obsolete, it will be ignored, please remove it to avoid potential confusion",
|
||||
),
|
||||
Event(
|
||||
"image",
|
||||
"ghcr.io/ansible-collections/simple-1:tag",
|
||||
"Pulling",
|
||||
"Working",
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"ghcr.io/ansible-collections/simple-1:tag",
|
||||
"Working",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"ghcr.io/ansible-collections/simple-1:tag",
|
||||
"Working",
|
||||
"[> ] 6.89kB/599.9kB",
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"ghcr.io/ansible-collections/simple-1:tag",
|
||||
"Working",
|
||||
"[==================================================>] 599.9kB/599.9kB",
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"ghcr.io/ansible-collections/simple-1:tag",
|
||||
"Working",
|
||||
None,
|
||||
),
|
||||
Event(
|
||||
"image-layer", "ghcr.io/ansible-collections/simple-1:tag", "Done", None
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"ghcr.io/ansible-collections/simple-1:tag",
|
||||
"Working",
|
||||
"[==> ] 32.77kB/599.9kB",
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"ghcr.io/ansible-collections/simple-1:tag",
|
||||
"Working",
|
||||
"[==================================================>] 599.9kB/599.9kB",
|
||||
),
|
||||
Event(
|
||||
"image-layer",
|
||||
"ghcr.io/ansible-collections/simple-1:tag",
|
||||
"Working",
|
||||
"[==================================================>] 599.9kB/599.9kB",
|
||||
),
|
||||
Event(
|
||||
"image-layer", "ghcr.io/ansible-collections/simple-1:tag", "Done", None
|
||||
),
|
||||
Event(
|
||||
"image", "ghcr.io/ansible-collections/simple-1:tag", "Pulled", "Done"
|
||||
),
|
||||
],
|
||||
[],
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"test_id, compose_version, stderr, events, warnings",
|
||||
JSON_TEST_CASES,
|
||||
ids=[tc[0] for tc in JSON_TEST_CASES],
|
||||
)
|
||||
def test_parse_json_events(
|
||||
test_id: str,
|
||||
compose_version: str,
|
||||
stderr: str,
|
||||
events: list[Event],
|
||||
warnings: list[str],
|
||||
) -> None:
|
||||
collected_warnings = []
|
||||
|
||||
def collect_warning(msg: str) -> None:
|
||||
collected_warnings.append(msg)
|
||||
|
||||
collected_events = parse_json_events(
|
||||
stderr.encode("utf-8"),
|
||||
warn_function=collect_warning,
|
||||
)
|
||||
|
||||
print(collected_events)
|
||||
print(collected_warnings)
|
||||
|
||||
assert collected_events == events
|
||||
assert collected_warnings == warnings
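# A minimal sketch of the JSON wire format exercised above, assuming each
# stderr line is a standalone JSON object (lines with a "parent_id" describe
# image layers, lines without one describe the image itself):
#
#     import json
#     obj = json.loads('{"id":"63a26ae4e8a8","parent_id":"img","text":"Pulling fs layer"}')
#     kind = "image-layer" if obj.get("parent_id") else "image"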
|
||||
@@ -0,0 +1,87 @@
# Copyright 2022 Red Hat | Ansible
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils._copy import (
|
||||
_stream_generator_to_fileobj,
|
||||
)
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
T = t.TypeVar("T")
|
||||
|
||||
|
||||
def _simple_generator(sequence: Sequence[T]) -> t.Generator[T]:
|
||||
yield from sequence
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"chunks, read_sizes",
|
||||
[
|
||||
(
|
||||
[
|
||||
(1, b"1"),
|
||||
(1, b"2"),
|
||||
(1, b"3"),
|
||||
(1, b"4"),
|
||||
],
|
||||
[
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
],
|
||||
),
|
||||
(
|
||||
[
|
||||
(1, b"123"),
|
||||
(1, b"456"),
|
||||
(1, b"789"),
|
||||
],
|
||||
[
|
||||
1,
|
||||
4,
|
||||
2,
|
||||
2,
|
||||
2,
|
||||
],
|
||||
),
|
||||
(
|
||||
[
|
||||
(10 * 1024 * 1024, b"0"),
|
||||
(10 * 1024 * 1024, b"1"),
|
||||
],
|
||||
[
|
||||
1024 * 1024 - 5,
|
||||
5 * 1024 * 1024 - 3,
|
||||
10 * 1024 * 1024 - 2,
|
||||
2 * 1024 * 1024 - 1,
|
||||
2 * 1024 * 1024 + 5 + 3 + 2 + 1,
|
||||
],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test__stream_generator_to_fileobj(
|
||||
chunks: list[tuple[int, bytes]], read_sizes: list[int]
|
||||
) -> None:
|
||||
data_chunks = [count * data for count, data in chunks]
|
||||
stream = _simple_generator(data_chunks)
|
||||
expected = b"".join(data_chunks)
|
||||
|
||||
buffer = b""
|
||||
totally_read = 0
|
||||
f = _stream_generator_to_fileobj(stream)
|
||||
for read_size in read_sizes:
|
||||
chunk = f.read(read_size)
|
||||
assert len(chunk) == min(read_size, len(expected) - len(buffer))
|
||||
buffer += chunk
|
||||
totally_read += read_size
|
||||
|
||||
assert buffer == expected[: len(buffer)]
|
||||
assert min(totally_read, len(expected)) == len(buffer)
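# Illustrative usage, following the invariants asserted above: the wrapper
# turns a generator of byte chunks into a file-like object whose read(n)
# returns exactly n bytes until the stream is exhausted.
#
#     f = _stream_generator_to_fileobj(_simple_generator([b"abc", b"def"]))
#     assert f.read(4) == b"abcd"
#     assert f.read(4) == b"ef"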
|
||||
@@ -0,0 +1,97 @@
# Copyright 2022 Red Hat | Ansible
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import tarfile
|
||||
import typing as t
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils._image_archive import (
|
||||
ImageArchiveInvalidException,
|
||||
api_image_id,
|
||||
archived_image_manifest,
|
||||
)
|
||||
|
||||
from ..test_support.docker_image_archive_stubbing import (
|
||||
write_imitation_archive,
|
||||
write_imitation_archive_with_manifest,
|
||||
write_irrelevant_tar,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(name="tar_file_name")
|
||||
def tar_file_name_fixture(tmpdir: t.Any) -> str:
|
||||
"""
|
||||
Return the name of a non-existing tar file in an existing temporary directory.
|
||||
"""
|
||||
|
||||
# tmpdir.join() returns a py.path.local; cast to str for a plain path
|
||||
return str(tmpdir.join("foo.tar"))
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"expected, value", [("sha256:foo", "foo"), ("sha256:bar", "bar")]
|
||||
)
|
||||
def test_api_image_id_from_archive_id(expected: str, value: str) -> None:
|
||||
assert api_image_id(value) == expected
|
||||
|
||||
|
||||
def test_archived_image_manifest_extracts(tar_file_name: str) -> None:
|
||||
expected_id = "abcde12345"
|
||||
expected_tags = ["foo:latest", "bar:v1"]
|
||||
|
||||
write_imitation_archive(tar_file_name, expected_id, expected_tags)
|
||||
|
||||
actual = archived_image_manifest(tar_file_name)
|
||||
|
||||
assert actual is not None
|
||||
assert actual.image_id == expected_id
|
||||
assert actual.repo_tags == expected_tags
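# A rough sketch of what the extraction presumably does, assuming the
# docker-save layout with a manifest.json at the archive root (the stub
# helpers above write exactly such an archive):
#
#     import json
#     import tarfile
#
#     with tarfile.open(tar_file_name) as tar:
#         member = tar.extractfile("manifest.json")
#         manifest = json.loads(member.read().decode("utf-8"))
#     # manifest[0]["Config"] identifies the image, manifest[0]["RepoTags"] its tags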
|
||||
|
||||
|
||||
def test_archived_image_manifest_extracts_nothing_when_file_not_present(
|
||||
tar_file_name: str,
|
||||
) -> None:
|
||||
image_id = archived_image_manifest(tar_file_name)
|
||||
|
||||
assert image_id is None
|
||||
|
||||
|
||||
def test_archived_image_manifest_raises_when_file_not_a_tar() -> None:
|
||||
try:
|
||||
archived_image_manifest(__file__)
|
||||
raise AssertionError()
|
||||
except ImageArchiveInvalidException as e:
|
||||
assert isinstance(e.__cause__, tarfile.ReadError)
|
||||
assert str(__file__) in str(e)
|
||||
|
||||
|
||||
def test_archived_image_manifest_raises_when_tar_missing_manifest(
|
||||
tar_file_name: str,
|
||||
) -> None:
|
||||
write_irrelevant_tar(tar_file_name)
|
||||
|
||||
try:
|
||||
archived_image_manifest(tar_file_name)
|
||||
raise AssertionError()
|
||||
except ImageArchiveInvalidException as e:
|
||||
assert isinstance(e.__cause__, KeyError)
|
||||
assert "manifest.json" in str(e.__cause__)
|
||||
|
||||
|
||||
def test_archived_image_manifest_raises_when_manifest_missing_id(
|
||||
tar_file_name: str,
|
||||
) -> None:
|
||||
manifest = [{"foo": "bar"}]
|
||||
|
||||
write_imitation_archive_with_manifest(tar_file_name, manifest)
|
||||
|
||||
try:
|
||||
archived_image_manifest(tar_file_name)
|
||||
raise AssertionError()
|
||||
except ImageArchiveInvalidException as e:
|
||||
assert isinstance(e.__cause__, KeyError)
|
||||
assert "Config" in str(e.__cause__)
|
||||
@@ -0,0 +1,271 @@
# Copyright (c) 2025 Felix Fontein
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
import typing as t
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils._image_name import (
|
||||
ImageName,
|
||||
is_digest,
|
||||
is_tag,
|
||||
)
|
||||
|
||||
TEST_IS_DIGEST: list[tuple[str, dict[str, t.Any], bool]] = [
|
||||
("", {}, False),
|
||||
("", {"allow_empty": True}, True),
|
||||
("sha256:abc", {}, False),
|
||||
(f"sha256:{'a' * 63}", {}, False),
|
||||
(f"sha256:{'a' * 64}", {}, True),
|
||||
(f"sha256:{'a' * 65}", {}, False),
|
||||
(
|
||||
"sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
{},
|
||||
True,
|
||||
),
|
||||
("1.25.3", {}, False),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("name, kwargs, expected", TEST_IS_DIGEST)
|
||||
def test_is_digest(name: str, kwargs: dict[str, t.Any], expected: bool) -> None:
|
||||
assert is_digest(name, **kwargs) == expected
|
||||
|
||||
|
||||
TEST_IS_TAG: list[tuple[str, dict[str, t.Any], bool]] = [
|
||||
("", {}, False),
|
||||
("", {"allow_empty": True}, True),
|
||||
("foo", {}, True),
|
||||
("-foo", {}, False),
|
||||
("f" * 128, {}, True),
|
||||
("f" * 129, {}, False),
|
||||
(
|
||||
"sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
{},
|
||||
False,
|
||||
),
|
||||
("1.25.3", {}, True),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("name, kwargs, expected", TEST_IS_TAG)
|
||||
def test_is_tag(name: str, kwargs: dict[str, t.Any], expected: bool) -> None:
|
||||
assert is_tag(name, **kwargs) == expected
|
||||
|
||||
|
||||
TEST_IMAGE_NAME_VALIDATE_SUCCESS: list[ImageName] = [
|
||||
ImageName(registry="localhost", path="nginx", tag=None, digest=None),
|
||||
ImageName(registry=None, path="nginx", tag="1.25.3", digest=None),
|
||||
ImageName(
|
||||
registry=None,
|
||||
path="nginx",
|
||||
tag=None,
|
||||
digest="sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
ImageName(
|
||||
registry=None,
|
||||
path="nginx",
|
||||
tag="1.25.3",
|
||||
digest="sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("data", TEST_IMAGE_NAME_VALIDATE_SUCCESS)
|
||||
def test_imagename_validate_success(data: ImageName) -> None:
|
||||
assert data.validate() is data
|
||||
|
||||
|
||||
TEST_IMAGE_NAME_VALIDATE_FAILED: list[tuple[ImageName, str]] = [
|
||||
(
|
||||
ImageName(registry="-foo", path="", tag=None, digest=None),
|
||||
'Invalid registry name (-foo): must not begin or end with a "-".',
|
||||
),
|
||||
(
|
||||
ImageName(registry="foo:", path="", tag=None, digest=None),
|
||||
'Invalid registry name (foo:): must not end with ":".',
|
||||
),
|
||||
(ImageName(registry=None, path="", tag=None, digest=None), "Invalid path ()."),
|
||||
(ImageName(registry=None, path="-", tag=None, digest=None), "Invalid path (-)."),
|
||||
(ImageName(registry=None, path="/", tag=None, digest=None), "Invalid path (/)."),
|
||||
(ImageName(registry=None, path="a", tag="-", digest=None), "Invalid tag (-)."),
|
||||
(ImageName(registry=None, path="a", tag=None, digest="-"), "Invalid digest (-)."),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("data, expected", TEST_IMAGE_NAME_VALIDATE_FAILED)
|
||||
def test_imagename_validate_failed(data: ImageName, expected: str) -> None:
|
||||
with pytest.raises(ValueError, match=f"^{re.escape(expected)}$"):
|
||||
data.validate()
|
||||
|
||||
|
||||
TEST_IMAGE_NAME_PARSE: list[tuple[str, ImageName]] = [
|
||||
("", ImageName(registry=None, path="", tag=None, digest=None)),
|
||||
("foo", ImageName(registry=None, path="foo", tag=None, digest=None)),
|
||||
("foo:5000", ImageName(registry=None, path="foo", tag="5000", digest=None)),
|
||||
("foo:5000/", ImageName(registry="foo:5000", path="", tag=None, digest=None)),
|
||||
("foo:5000/bar", ImageName(registry="foo:5000", path="bar", tag=None, digest=None)),
|
||||
("/bar", ImageName(registry=None, path="/bar", tag=None, digest=None)),
|
||||
(
|
||||
"localhost/foo:5000",
|
||||
ImageName(registry="localhost", path="foo", tag="5000", digest=None),
|
||||
),
|
||||
(
|
||||
"foo.bar/baz:5000",
|
||||
ImageName(registry="foo.bar", path="baz", tag="5000", digest=None),
|
||||
),
|
||||
(
|
||||
"foo:bar/baz:bam:5000",
|
||||
ImageName(registry="foo:bar", path="baz:bam", tag="5000", digest=None),
|
||||
),
|
||||
("foo:bar:baz", ImageName(registry=None, path="foo:bar", tag="baz", digest=None)),
|
||||
("foo@bar@baz", ImageName(registry=None, path="foo@bar", tag=None, digest="baz")),
|
||||
("nginx:1.25.3", ImageName(registry=None, path="nginx", tag="1.25.3", digest=None)),
|
||||
(
|
||||
"nginx@sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
ImageName(
|
||||
registry=None,
|
||||
path="nginx",
|
||||
tag=None,
|
||||
digest="sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
),
|
||||
(
|
||||
"nginx:1.25.3@sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
ImageName(
|
||||
registry=None,
|
||||
path="nginx",
|
||||
tag="1.25.3",
|
||||
digest="sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("name, expected", TEST_IMAGE_NAME_PARSE)
|
||||
def test_imagename_parse(name: str, expected: ImageName) -> None:
|
||||
assert ImageName.parse(name) == expected
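# The cases above suggest the parse order (an observation from the test data,
# not the implementation): split the digest off at the last "@", then take the
# tag from the last ":" of the remainder, and treat a leading "component/" as
# the registry. For example:
#
#     ImageName.parse("foo:bar/baz:bam:5000")
#     # -> ImageName(registry="foo:bar", path="baz:bam", tag="5000", digest=None)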
|
||||
|
||||
|
||||
TEST_IMAGE_NAME_COMBINE: list[tuple[ImageName, str]] = [
|
||||
(ImageName(registry=None, path="", tag=None, digest=None), ""),
|
||||
(ImageName(registry=None, path="nginx", tag="1.25.3", digest=None), "nginx:1.25.3"),
|
||||
(
|
||||
ImageName(
|
||||
registry=None,
|
||||
path="nginx",
|
||||
tag=None,
|
||||
digest="sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
"nginx@sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
(
|
||||
ImageName(
|
||||
registry=None,
|
||||
path="nginx",
|
||||
tag="1.25.3",
|
||||
digest="sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
"nginx:1.25.3@sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("data, expected", TEST_IMAGE_NAME_COMBINE)
|
||||
def test_imagename_combine(data: ImageName, expected: str) -> None:
|
||||
assert data.combine() == expected
|
||||
|
||||
|
||||
TEST_IMAGE_NAME_NORMALIZE: list[tuple[ImageName, ImageName]] = [
|
||||
(
|
||||
ImageName(registry=None, path="", tag=None, digest=None),
|
||||
ImageName(registry="docker.io", path="", tag=None, digest=None),
|
||||
),
|
||||
(
|
||||
ImageName(registry="", path="", tag=None, digest=None),
|
||||
ImageName(registry="docker.io", path="", tag=None, digest=None),
|
||||
),
|
||||
(
|
||||
ImageName(registry="index.docker.io", path="", tag=None, digest=None),
|
||||
ImageName(registry="docker.io", path="", tag=None, digest=None),
|
||||
),
|
||||
(
|
||||
ImageName(registry="registry.hub.docker.com", path="", tag=None, digest=None),
|
||||
ImageName(registry="docker.io", path="", tag=None, digest=None),
|
||||
),
|
||||
(
|
||||
ImageName(registry=None, path="foo/bar", tag=None, digest=None),
|
||||
ImageName(registry="docker.io", path="foo/bar", tag=None, digest=None),
|
||||
),
|
||||
(
|
||||
ImageName(registry=None, path="nginx", tag="1.25.3", digest=None),
|
||||
ImageName(
|
||||
registry="docker.io", path="library/nginx", tag="1.25.3", digest=None
|
||||
),
|
||||
),
|
||||
(
|
||||
ImageName(
|
||||
registry=None,
|
||||
path="nginx",
|
||||
tag=None,
|
||||
digest="sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
ImageName(
|
||||
registry="docker.io",
|
||||
path="library/nginx",
|
||||
tag=None,
|
||||
digest="sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
),
|
||||
(
|
||||
ImageName(
|
||||
registry=None,
|
||||
path="nginx",
|
||||
tag="1.25.3",
|
||||
digest="sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
ImageName(
|
||||
registry="docker.io",
|
||||
path="library/nginx",
|
||||
tag="1.25.3",
|
||||
digest="sha256:d02f9b9db4d759ef27dc26b426b842ff2fb881c5c6079612d27ec36e36b132dd",
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("data, expected", TEST_IMAGE_NAME_NORMALIZE)
|
||||
def test_imagename_normalize(data: ImageName, expected: ImageName) -> None:
|
||||
assert data.normalize() == expected
|
||||
|
||||
|
||||
TEST_IMAGE_NAME_HOSTNAME_AND_PORT: list[tuple[ImageName, str, int]] = [
|
||||
(
|
||||
ImageName(registry="docker.io", path="", tag=None, digest=None),
|
||||
"index.docker.io",
|
||||
443,
|
||||
),
|
||||
(ImageName(registry="localhost", path="", tag=None, digest=None), "localhost", 443),
|
||||
(ImageName(registry="foo:5000", path="", tag=None, digest=None), "foo", 5000),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"data, expected_hostname, expected_port", TEST_IMAGE_NAME_HOSTNAME_AND_PORT
|
||||
)
|
||||
def test_imagename_get_hostname_and_port(
|
||||
data: ImageName, expected_hostname: str, expected_port: int
|
||||
) -> None:
|
||||
hostname, port = data.get_hostname_and_port()
|
||||
assert hostname == expected_hostname
|
||||
assert port == expected_port
|
||||
|
||||
|
||||
def test_imagename_get_hostname_and_port_fail() -> None:
|
||||
msg = "Cannot get hostname when there is no registry. Normalize first!"
|
||||
with pytest.raises(ValueError, match=f"^{re.escape(msg)}$"):
|
||||
ImageName(registry=None, path="", tag=None, digest=None).get_hostname_and_port()
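# A minimal sketch consistent with the three cases above ("docker.io" maps to
# the index host, a ":port" suffix is split off, everything else defaults to
# port 443). The helper name is hypothetical:

def _hostname_and_port_sketch(registry: str) -> tuple[str, int]:
    host = "index.docker.io" if registry == "docker.io" else registry
    if ":" in host:
        name, _, port = host.rpartition(":")
        return name, int(port)
    return host, 443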
|
||||
@@ -0,0 +1,102 @@
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils._logfmt import (
|
||||
InvalidLogFmt,
|
||||
parse_line,
|
||||
)
|
||||
|
||||
SUCCESS_TEST_CASES: list[tuple[str, dict[str, t.Any], dict[str, t.Any]]] = [
|
||||
(
|
||||
'time="2024-02-02T08:14:10+01:00" level=warning msg="a network with name influxNetwork exists but was not'
|
||||
' created for project \\"influxdb\\".\\nSet `external: true` to use an existing network"',
|
||||
{},
|
||||
{
|
||||
"time": "2024-02-02T08:14:10+01:00",
|
||||
"level": "warning",
|
||||
"msg": 'a network with name influxNetwork exists but was not created for project "influxdb".\nSet `external: true` to use an existing network',
|
||||
},
|
||||
),
|
||||
(
|
||||
'time="2024-02-02T08:14:10+01:00" level=warning msg="a network with name influxNetwork exists but was not'
|
||||
' created for project \\"influxdb\\".\\nSet `external: true` to use an existing network"',
|
||||
{"logrus_mode": True},
|
||||
{
|
||||
"time": "2024-02-02T08:14:10+01:00",
|
||||
"level": "warning",
|
||||
"msg": 'a network with name influxNetwork exists but was not created for project "influxdb".\nSet `external: true` to use an existing network',
|
||||
},
|
||||
),
|
||||
(
|
||||
'foo=bar a=14 baz="hello kitty" cool%story=bro f %^asdf',
|
||||
{},
|
||||
{
|
||||
"foo": "bar",
|
||||
"a": "14",
|
||||
"baz": "hello kitty",
|
||||
"cool%story": "bro",
|
||||
"f": None,
|
||||
"%^asdf": None,
|
||||
},
|
||||
),
|
||||
(
|
||||
'{"foo":"bar"}',
|
||||
{},
|
||||
{
|
||||
"{": None,
|
||||
"foo": None,
|
||||
":": None,
|
||||
"bar": None,
|
||||
"}": None,
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
FAILURE_TEST_CASES: list[tuple[str, dict[str, t.Any], str]] = [
|
||||
(
|
||||
'foo=bar a=14 baz="hello kitty" cool%story=bro f %^asdf',
|
||||
{"logrus_mode": True},
|
||||
'Key must always be followed by "=" in logrus mode',
|
||||
),
|
||||
(
|
||||
"{}",
|
||||
{"logrus_mode": True},
|
||||
'Key must always be followed by "=" in logrus mode',
|
||||
),
|
||||
(
|
||||
"[]",
|
||||
{"logrus_mode": True},
|
||||
'Key must always be followed by "=" in logrus mode',
|
||||
),
|
||||
(
|
||||
'{"foo=bar": "baz=bam"}',
|
||||
{"logrus_mode": True},
|
||||
'Key must always be followed by "=" in logrus mode',
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("line, kwargs, result", SUCCESS_TEST_CASES)
|
||||
def test_parse_line_success(
|
||||
line: str, kwargs: dict[str, t.Any], result: dict[str, t.Any]
|
||||
) -> None:
|
||||
res = parse_line(line, **kwargs)
|
||||
print(repr(res))
|
||||
assert res == result
|
||||
|
||||
|
||||
@pytest.mark.parametrize("line, kwargs, message", FAILURE_TEST_CASES)
|
||||
def test_parse_line_failure(line: str, kwargs: dict[str, t.Any], message: str) -> None:
|
||||
with pytest.raises(InvalidLogFmt) as exc:
|
||||
parse_line(line, **kwargs)
|
||||
|
||||
print(repr(exc.value.args[0]))
|
||||
assert exc.value.args[0] == message
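# Quick reference, taken from the success cases above: bare tokens become keys
# with value None, key=value pairs are split on "=", and quoted values may
# contain escaped quotes and "\n" escapes:
#
#     parse_line('foo=bar baz="hello kitty" f')
#     # -> {"foo": "bar", "baz": "hello kitty", "f": None}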
|
||||
@@ -0,0 +1,30 @@
# Copyright 2022 Red Hat | Ansible
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import annotations

import pytest

from ansible_collections.community.docker.plugins.module_utils._scramble import (
    scramble,
    unscramble,
)


@pytest.mark.parametrize(
    "plaintext, key, scrambled",
    [
        ("", b"0", "=S="),
        ("hello", b"\x00", "=S=aGVsbG8="),
        ("hello", b"\x01", "=S=aWRtbW4="),
    ],
)
def test_scramble_unscramble(plaintext: str, key: bytes, scrambled: str) -> None:
    scrambled_ = scramble(plaintext, key)
    print(f"{scrambled_!r} == {scrambled!r}")
    assert scrambled_ == scrambled

    plaintext_ = unscramble(scrambled, key)
    print(f"{plaintext_!r} == {plaintext!r}")
    assert plaintext_ == plaintext
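# The vectors above are consistent with a simple XOR-with-key step followed by
# base64, prefixed with "=S=". A sketch inferred from the test data, not
# necessarily the real implementation (the helper name is hypothetical):

import base64

def _scramble_sketch(plaintext: str, key: bytes) -> str:
    data = plaintext.encode("utf-8")
    # XOR every byte with the repeating key, then base64-encode the result.
    xored = bytes(b ^ key[i % len(key)] for i, b in enumerate(data))
    return "=S=" + base64.b64encode(xored).decode("ascii")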
@@ -0,0 +1,469 @@
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils._util import (
|
||||
compare_dict_allow_more_present,
|
||||
compare_generic,
|
||||
convert_duration_to_nanosecond,
|
||||
parse_healthcheck,
|
||||
)
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
|
||||
class DAMSpec(t.TypedDict):
|
||||
av: dict[str, t.Any]
|
||||
bv: dict[str, t.Any]
|
||||
result: bool
|
||||
|
||||
class Spec(t.TypedDict):
|
||||
a: t.Any
|
||||
b: t.Any
|
||||
method: t.Literal["strict", "ignore", "allow_more_present"]
|
||||
type: t.Literal["value", "list", "set", "set(dict)", "dict"]
|
||||
result: bool
|
||||
|
||||
|
||||
DICT_ALLOW_MORE_PRESENT: list[DAMSpec] = [
|
||||
{"av": {}, "bv": {"a": 1}, "result": True},
|
||||
{"av": {"a": 1}, "bv": {"a": 1, "b": 2}, "result": True},
|
||||
{"av": {"a": 1}, "bv": {"b": 2}, "result": False},
|
||||
{"av": {"a": 1}, "bv": {"a": None, "b": 1}, "result": False},
|
||||
{"av": {"a": None}, "bv": {"b": 1}, "result": False},
|
||||
]
|
||||
|
||||
DICT_ALLOW_MORE_PRESENT_SPECS: list[Spec] = [
|
||||
{
|
||||
"a": entry["av"],
|
||||
"b": entry["bv"],
|
||||
"method": "allow_more_present",
|
||||
"type": "dict",
|
||||
"result": entry["result"],
|
||||
}
|
||||
for entry in DICT_ALLOW_MORE_PRESENT
|
||||
]
|
||||
|
||||
COMPARE_GENERIC: list[Spec] = [
|
||||
########################################################################################
|
||||
# value
|
||||
{"a": 1, "b": 2, "method": "strict", "type": "value", "result": False},
|
||||
{"a": "hello", "b": "hello", "method": "strict", "type": "value", "result": True},
|
||||
{"a": None, "b": "hello", "method": "strict", "type": "value", "result": False},
|
||||
{"a": None, "b": None, "method": "strict", "type": "value", "result": True},
|
||||
{"a": 1, "b": 2, "method": "ignore", "type": "value", "result": True},
|
||||
{"a": None, "b": 2, "method": "ignore", "type": "value", "result": True},
|
||||
########################################################################################
|
||||
# list
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
],
|
||||
"b": [
|
||||
"y",
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "list",
|
||||
"result": False,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
"x",
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "list",
|
||||
"result": False,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "list",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"b": [
|
||||
"y",
|
||||
"x",
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "list",
|
||||
"result": False,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "list",
|
||||
"result": False,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "list",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "list",
|
||||
"result": False,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"z",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
"y",
|
||||
"x",
|
||||
"z",
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "list",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"b": [
|
||||
"y",
|
||||
"x",
|
||||
],
|
||||
"method": "ignore",
|
||||
"type": "list",
|
||||
"result": True,
|
||||
},
|
||||
########################################################################################
|
||||
# set
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
],
|
||||
"b": [
|
||||
"y",
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "set",
|
||||
"result": False,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
"x",
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "set",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "set",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"b": [
|
||||
"y",
|
||||
"x",
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "set",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "set",
|
||||
"result": False,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "set",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
"y",
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "set",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"z",
|
||||
],
|
||||
"b": [
|
||||
"x",
|
||||
"y",
|
||||
"x",
|
||||
"z",
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "set",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
"x",
|
||||
"a",
|
||||
],
|
||||
"b": [
|
||||
"y",
|
||||
"z",
|
||||
],
|
||||
"method": "ignore",
|
||||
"type": "set",
|
||||
"result": True,
|
||||
},
|
||||
########################################################################################
|
||||
# set(dict)
|
||||
{
|
||||
"a": [
|
||||
{"x": 1},
|
||||
],
|
||||
"b": [
|
||||
{"y": 1},
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "set(dict)",
|
||||
"result": False,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
{"x": 1},
|
||||
],
|
||||
"b": [
|
||||
{"x": 1},
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "set(dict)",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
{"x": 1},
|
||||
],
|
||||
"b": [
|
||||
{"x": 1, "y": 2},
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "set(dict)",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
{"x": 1},
|
||||
{"x": 2, "y": 3},
|
||||
],
|
||||
"b": [
|
||||
{"x": 1},
|
||||
{"x": 2, "y": 3},
|
||||
],
|
||||
"method": "strict",
|
||||
"type": "set(dict)",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
{"x": 1},
|
||||
],
|
||||
"b": [
|
||||
{"x": 1, "z": 2},
|
||||
{"x": 2, "y": 3},
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "set(dict)",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
{"x": 1, "y": 2},
|
||||
],
|
||||
"b": [
|
||||
{"x": 1},
|
||||
{"x": 2, "y": 3},
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "set(dict)",
|
||||
"result": False,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
{"x": 1, "y": 3},
|
||||
],
|
||||
"b": [
|
||||
{"x": 1},
|
||||
{"x": 1, "y": 3, "z": 4},
|
||||
],
|
||||
"method": "allow_more_present",
|
||||
"type": "set(dict)",
|
||||
"result": True,
|
||||
},
|
||||
{
|
||||
"a": [
|
||||
{"x": 1},
|
||||
{"x": 2, "y": 3},
|
||||
],
|
||||
"b": [
|
||||
{"x": 1},
|
||||
],
|
||||
"method": "ignore",
|
||||
"type": "set(dict)",
|
||||
"result": True,
|
||||
},
|
||||
########################################################################################
|
||||
# dict
|
||||
{"a": {"x": 1}, "b": {"y": 1}, "method": "strict", "type": "dict", "result": False},
|
||||
{
|
||||
"a": {"x": 1},
|
||||
"b": {"x": 1, "y": 2},
|
||||
"method": "strict",
|
||||
"type": "dict",
|
||||
"result": False,
|
||||
},
|
||||
{"a": {"x": 1}, "b": {"x": 1}, "method": "strict", "type": "dict", "result": True},
|
||||
{
|
||||
"a": {"x": 1, "z": 2},
|
||||
"b": {"x": 1, "y": 2},
|
||||
"method": "strict",
|
||||
"type": "dict",
|
||||
"result": False,
|
||||
},
|
||||
{
|
||||
"a": {"x": 1, "z": 2},
|
||||
"b": {"x": 1, "y": 2},
|
||||
"method": "ignore",
|
||||
"type": "dict",
|
||||
"result": True,
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("entry", DICT_ALLOW_MORE_PRESENT)
|
||||
def test_dict_allow_more_present(entry: DAMSpec) -> None:
|
||||
assert compare_dict_allow_more_present(entry["av"], entry["bv"]) == entry["result"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("entry", COMPARE_GENERIC + DICT_ALLOW_MORE_PRESENT_SPECS)
|
||||
def test_compare_generic(entry: Spec) -> None:
|
||||
assert (
|
||||
compare_generic(entry["a"], entry["b"], entry["method"], entry["type"])
|
||||
== entry["result"]
|
||||
)
|
||||
|
||||
|
||||
def test_convert_duration_to_nanosecond() -> None:
|
||||
nanoseconds = convert_duration_to_nanosecond("5s")
|
||||
assert nanoseconds == 5000000000
|
||||
nanoseconds = convert_duration_to_nanosecond("1m5s")
|
||||
assert nanoseconds == 65000000000
|
||||
with pytest.raises(ValueError):
|
||||
convert_duration_to_nanosecond([1, 2, 3]) # type: ignore
|
||||
with pytest.raises(ValueError):
|
||||
convert_duration_to_nanosecond("10x")
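# Worked example for the values above: "1m5s" is 60 s + 5 s = 65 s, i.e.
# 65 * 10**9 = 65000000000 ns; the healthcheck test below uses
# "1h1m2s3ms4us" = 3662 s + 3 ms + 4 us = 3662003004000 ns.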
|
||||
|
||||
|
||||
def test_parse_healthcheck() -> None:
|
||||
result, disabled = parse_healthcheck(
|
||||
{
|
||||
"test": "sleep 1",
|
||||
"interval": "1s",
|
||||
}
|
||||
)
|
||||
assert disabled is False
|
||||
assert result == {"test": ["CMD-SHELL", "sleep 1"], "interval": 1000000000}
|
||||
|
||||
result, disabled = parse_healthcheck(
|
||||
{
|
||||
"test": ["NONE"],
|
||||
}
|
||||
)
|
||||
assert result is None
|
||||
assert disabled
|
||||
|
||||
result, disabled = parse_healthcheck({"test": "sleep 1", "interval": "1s423ms"})
|
||||
assert result == {"test": ["CMD-SHELL", "sleep 1"], "interval": 1423000000}
|
||||
assert disabled is False
|
||||
|
||||
result, disabled = parse_healthcheck(
|
||||
{"test": "sleep 1", "interval": "1h1m2s3ms4us"}
|
||||
)
|
||||
assert result == {"test": ["CMD-SHELL", "sleep 1"], "interval": 3662003004000}
|
||||
assert disabled is False
|
||||
@@ -0,0 +1,84 @@
# Copyright 2025 Felix Fontein <felix@fontein.de>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.docker.plugins.modules.docker_container_copy_into import (
|
||||
parse_modern,
|
||||
parse_octal_string_only,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"value, expected",
|
||||
[
|
||||
("0777", 0o777),
|
||||
("777", 0o777),
|
||||
("0o777", 0o777),
|
||||
("0755", 0o755),
|
||||
("755", 0o755),
|
||||
("0o755", 0o755),
|
||||
("0644", 0o644),
|
||||
("644", 0o644),
|
||||
("0o644", 0o644),
|
||||
(" 0644 ", 0o644),
|
||||
(" 644 ", 0o644),
|
||||
(" 0o644 ", 0o644),
|
||||
("-1", -1),
|
||||
],
|
||||
)
|
||||
def test_parse_string(value: str, expected: int) -> None:
|
||||
assert parse_modern(value) == expected
|
||||
assert parse_octal_string_only(value) == expected
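# All of the accepted string forms above ("777", "0777", "0o777", padded, or
# negative) happen to be covered by int() with an explicit base; a minimal
# sketch, not necessarily how the module does it (helper name hypothetical):
#
#     def _parse_octal_sketch(value: str) -> int:
#         return int(value, 8)  # accepts the "0o" prefix and strips whitespace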
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"value",
|
||||
[
|
||||
0o777,
|
||||
0o755,
|
||||
0o644,
|
||||
12345,
|
||||
123456789012345678901234567890123456789012345678901234567890,
|
||||
],
|
||||
)
|
||||
def test_parse_int(value: int) -> None:
|
||||
assert parse_modern(value) == value
|
||||
with pytest.raises(TypeError, match=f"^must be an octal string, got {value}L?$"):
|
||||
parse_octal_string_only(value) # type: ignore
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"value",
|
||||
[
|
||||
1.0,
|
||||
755.5,
|
||||
[],
|
||||
{},
|
||||
],
|
||||
)
|
||||
def test_parse_bad_type(value: t.Any) -> None:
|
||||
with pytest.raises(TypeError, match="^must be an octal string or an integer, got "):
|
||||
parse_modern(value)
|
||||
with pytest.raises(TypeError, match="^must be an octal string, got "):
|
||||
parse_octal_string_only(value)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"value",
|
||||
[
|
||||
"foo",
|
||||
"8",
|
||||
"9",
|
||||
],
|
||||
)
|
||||
def test_parse_bad_value(value: str) -> None:
|
||||
with pytest.raises(ValueError):
|
||||
parse_modern(value)
|
||||
with pytest.raises(ValueError):
|
||||
parse_octal_string_only(value)
|
||||
@@ -0,0 +1,121 @@
# Copyright 2022 Red Hat | Ansible
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils._image_archive import (
|
||||
api_image_id,
|
||||
)
|
||||
from ansible_collections.community.docker.plugins.modules.docker_image import (
|
||||
ImageManager,
|
||||
)
|
||||
|
||||
from ..test_support.docker_image_archive_stubbing import (
|
||||
write_imitation_archive,
|
||||
write_irrelevant_tar,
|
||||
)
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
|
||||
def assert_no_logging(msg: str) -> t.NoReturn:
|
||||
raise AssertionError(f"Should not have logged anything but logged {msg}")
|
||||
|
||||
|
||||
def capture_logging(messages: list[str]) -> Callable[[str], None]:
|
||||
def capture(msg: str) -> None:
|
||||
messages.append(msg)
|
||||
|
||||
return capture
|
||||
|
||||
|
||||
@pytest.fixture(name="tar_file_name")
|
||||
def tar_file_name_fixture(tmpdir: t.Any) -> str:
|
||||
"""
|
||||
Return the name of a non-existing tar file in an existing temporary directory.
|
||||
"""
|
||||
|
||||
return str(tmpdir.join("foo.tar"))
|
||||
|
||||
|
||||
def test_archived_image_action_when_missing(tar_file_name: str) -> None:
|
||||
fake_name = "a:latest"
|
||||
fake_id = "a1"
|
||||
|
||||
expected = f"Archived image {fake_name} to {tar_file_name}, since none present"
|
||||
|
||||
actual = ImageManager.archived_image_action(
|
||||
assert_no_logging, tar_file_name, fake_name, api_image_id(fake_id)
|
||||
)
|
||||
|
||||
assert actual == expected
|
||||
|
||||
|
||||
def test_archived_image_action_when_current(tar_file_name: str) -> None:
|
||||
fake_name = "b:latest"
|
||||
fake_id = "b2"
|
||||
|
||||
write_imitation_archive(tar_file_name, fake_id, [fake_name])
|
||||
|
||||
actual = ImageManager.archived_image_action(
|
||||
assert_no_logging, tar_file_name, fake_name, api_image_id(fake_id)
|
||||
)
|
||||
|
||||
assert actual is None
|
||||
|
||||
|
||||
def test_archived_image_action_when_invalid(tar_file_name: str) -> None:
|
||||
fake_name = "c:1.2.3"
|
||||
fake_id = "c3"
|
||||
|
||||
write_irrelevant_tar(tar_file_name)
|
||||
|
||||
expected = f"Archived image {fake_name} to {tar_file_name}, overwriting an unreadable archive file"
|
||||
|
||||
actual_log: list[str] = []
|
||||
actual = ImageManager.archived_image_action(
|
||||
capture_logging(actual_log), tar_file_name, fake_name, api_image_id(fake_id)
|
||||
)
|
||||
|
||||
assert actual == expected
|
||||
|
||||
assert len(actual_log) == 1
|
||||
assert actual_log[0].startswith("Unable to extract manifest summary from archive")
|
||||
|
||||
|
||||
def test_archived_image_action_when_obsolete_by_id(tar_file_name: str) -> None:
|
||||
fake_name = "d:0.0.1"
|
||||
old_id = "e5"
|
||||
new_id = "d4"
|
||||
|
||||
write_imitation_archive(tar_file_name, old_id, [fake_name])
|
||||
|
||||
expected = f"Archived image {fake_name} to {tar_file_name}, overwriting archive with image {old_id} named {fake_name}"
|
||||
actual = ImageManager.archived_image_action(
|
||||
assert_no_logging, tar_file_name, fake_name, api_image_id(new_id)
|
||||
)
|
||||
|
||||
assert actual == expected
|
||||
|
||||
|
||||
def test_archived_image_action_when_obsolete_by_name(tar_file_name: str) -> None:
|
||||
old_name = "hi"
|
||||
new_name = "d:0.0.1"
|
||||
fake_id = "d4"
|
||||
|
||||
write_imitation_archive(tar_file_name, fake_id, [old_name])
|
||||
|
||||
expected = f"Archived image {new_name} to {tar_file_name}, overwriting archive with image {fake_id} named {old_name}"
|
||||
actual = ImageManager.archived_image_action(
|
||||
assert_no_logging, tar_file_name, new_name, api_image_id(fake_id)
|
||||
)
|
||||
|
||||
print(f"actual : {actual}")
|
||||
print(f"expected : {expected}")
|
||||
assert actual == expected
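# Taken together, the tests above pin down the decision table of
# archived_image_action (summarized here for reference):
#   archive missing              -> "Archived image ... since none present"
#   same image id and name       -> None (nothing to do)
#   unreadable archive           -> overwrite message, plus one logged warning
#   different id or tag          -> "overwriting archive with image <id> named <name>"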
|
||||
@@ -0,0 +1,25 @@
# Copyright 2024 Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import annotations

import pytest

from ansible_collections.community.docker.plugins.modules.docker_image_build import (
    _quote_csv,
)


@pytest.mark.parametrize(
    "value, expected",
    [
        ("", ""),
        (" ", '" "'),
        (",", '","'),
        ('"', '""""'),
        ('\rhello, "hi" !\n', '"\rhello, ""hi"" !\n"'),
    ],
)
def test__quote_csv(value: str, expected: str) -> None:
    assert _quote_csv(value) == expected
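# The expectations above match standard CSV quoting: wrap the value in double
# quotes when it contains a comma, a quote, or leading/trailing whitespace (as
# exercised by the cases), and double any embedded quotes. A sketch of that
# rule (helper name hypothetical):

def _quote_csv_sketch(value: str) -> str:
    if any(c in value for c in ',"') or value != value.strip():
        return '"' + value.replace('"', '""') + '"'
    return value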
@@ -0,0 +1,46 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""Unit tests for docker_network."""

from __future__ import annotations

import typing as t

import pytest

from ansible_collections.community.docker.plugins.modules.docker_network import (
    validate_cidr,
)


@pytest.mark.parametrize(
    "cidr,expected",
    [
        ("192.168.0.1/16", "ipv4"),
        ("192.168.0.1/24", "ipv4"),
        ("192.168.0.1/32", "ipv4"),
        ("fdd1:ac8c:0557:7ce2::/64", "ipv6"),
        ("fdd1:ac8c:0557:7ce2::/128", "ipv6"),
    ],
)
def test_validate_cidr_positives(
    cidr: str, expected: t.Literal["ipv4", "ipv6"]
) -> None:
    assert validate_cidr(cidr) == expected


@pytest.mark.parametrize(
    "cidr",
    [
        "192.168.0.1",
        "192.168.0.1/34",
        "192.168.0.1/asd",
        "fdd1:ac8c:0557:7ce2::",
    ],
)
def test_validate_cidr_negatives(cidr: str) -> None:
    with pytest.raises(ValueError) as e:
        validate_cidr(cidr)
    assert f'"{cidr}" is not a valid CIDR' == str(e.value)
@@ -0,0 +1,377 @@
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.community.docker.plugins.modules import (
|
||||
docker_swarm_service,
|
||||
)
|
||||
|
||||
docker_errors = pytest.importorskip("docker.errors")
APIError = docker_errors.APIError
|
||||
|
||||
|
||||
def test_retry_on_out_of_sequence_error(mocker: t.Any) -> None:
|
||||
run_mock = mocker.MagicMock(
|
||||
side_effect=APIError(
|
||||
message="",
|
||||
response=None,
|
||||
explanation="rpc error: code = Unknown desc = update out of sequence",
|
||||
)
|
||||
)
|
||||
mocker.patch("time.sleep")
|
||||
manager = docker_swarm_service.DockerServiceManager(client=None) # type: ignore
|
||||
manager.run = run_mock # type: ignore
|
||||
with pytest.raises(APIError):
|
||||
manager.run_safe()
|
||||
assert run_mock.call_count == 3
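# This test and the next pin down the retry contract: run() is attempted three
# times for "update out of sequence" errors and only once for anything else.
# A generic sketch of such a wrapper (hypothetical, not the module's code):

def _run_with_retry_sketch(run, retries: int = 3):
    for attempt in range(retries):
        try:
            return run()
        except Exception as exc:  # APIError in the real code
            # Re-raise immediately for unrelated errors, or once retries run out.
            if "update out of sequence" not in str(exc) or attempt == retries - 1:
                raise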
|
||||
|
||||
|
||||
def test_no_retry_on_general_api_error(mocker: t.Any) -> None:
|
||||
run_mock = mocker.MagicMock(
|
||||
side_effect=APIError(message="", response=None, explanation="some error")
|
||||
)
|
||||
mocker.patch("time.sleep")
|
||||
manager = docker_swarm_service.DockerServiceManager(client=None) # type: ignore
|
||||
manager.run = run_mock # type: ignore
|
||||
with pytest.raises(APIError):
|
||||
manager.run_safe()
|
||||
assert run_mock.call_count == 1
|
||||
|
||||
|
||||
def test_get_docker_environment(mocker: t.Any) -> None:
|
||||
env_file_result = {"TEST1": "A", "TEST2": "B", "TEST3": "C"}
|
||||
env_dict = {"TEST3": "CC", "TEST4": "D"}
|
||||
env_string = "TEST3=CC,TEST4=D"
|
||||
|
||||
env_list = ["TEST3=CC", "TEST4=D"]
|
||||
expected_result = sorted(["TEST1=A", "TEST2=B", "TEST3=CC", "TEST4=D"])
|
||||
mocker.patch.object(
|
||||
docker_swarm_service, "parse_env_file", return_value=env_file_result
|
||||
)
|
||||
mocker.patch.object(
|
||||
docker_swarm_service,
|
||||
"format_environment",
|
||||
side_effect=lambda d: [f"{key}={value}" for key, value in d.items()],
|
||||
)
|
||||
# Test with env dict and file
|
||||
result = docker_swarm_service.get_docker_environment(
|
||||
env_dict, env_files=["dummypath"]
|
||||
)
|
||||
assert result == expected_result
|
||||
# Test with env list and file
|
||||
result = docker_swarm_service.get_docker_environment(
|
||||
env_list, env_files=["dummypath"]
|
||||
)
|
||||
assert result == expected_result
|
||||
# Test with env string and file
|
||||
result = docker_swarm_service.get_docker_environment(
|
||||
env_string, env_files=["dummypath"]
|
||||
)
|
||||
assert result == expected_result
|
||||
# Test with empty env
|
||||
result = docker_swarm_service.get_docker_environment([], env_files=None)
|
||||
assert result == []
|
||||
# Test with empty env_files
|
||||
result = docker_swarm_service.get_docker_environment(None, env_files=[])
|
||||
assert result == []
|
||||
|
||||
|
||||
def test_get_nanoseconds_from_raw_option() -> None:
|
||||
value = docker_swarm_service.get_nanoseconds_from_raw_option("test", None)
|
||||
assert value is None
|
||||
|
||||
value = docker_swarm_service.get_nanoseconds_from_raw_option("test", "1m30s535ms")
|
||||
assert value == 90535000000
|
||||
|
||||
value = docker_swarm_service.get_nanoseconds_from_raw_option("test", 10000000000)
|
||||
assert value == 10000000000
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
docker_swarm_service.get_nanoseconds_from_raw_option("test", [])
|
||||
|
||||
|
||||
def test_has_dict_changed() -> None:
|
||||
assert not docker_swarm_service.has_dict_changed(
|
||||
{"a": 1},
|
||||
{"a": 1},
|
||||
)
|
||||
assert not docker_swarm_service.has_dict_changed({"a": 1}, {"a": 1, "b": 2})
|
||||
assert docker_swarm_service.has_dict_changed({"a": 1}, {"a": 2, "b": 2})
|
||||
assert docker_swarm_service.has_dict_changed({"a": 1, "b": 1}, {"a": 1})
|
||||
assert not docker_swarm_service.has_dict_changed(None, {"a": 2, "b": 2})
|
||||
assert docker_swarm_service.has_dict_changed({}, {"a": 2, "b": 2})
|
||||
assert docker_swarm_service.has_dict_changed({"a": 1}, {})
|
||||
assert docker_swarm_service.has_dict_changed({"a": 1}, None)
|
||||
assert not docker_swarm_service.has_dict_changed({}, {})
|
||||
assert not docker_swarm_service.has_dict_changed(None, None)
|
||||
assert not docker_swarm_service.has_dict_changed({}, None)
|
||||
assert not docker_swarm_service.has_dict_changed(None, {})
|
||||
|
||||
|
||||
def test_has_list_changed() -> None:
|
||||
# List comparisons without dictionaries
|
||||
# I could improve the indenting, but pycodestyle wants this instead
|
||||
assert not docker_swarm_service.has_list_changed(None, None)
|
||||
assert not docker_swarm_service.has_list_changed(None, [])
|
||||
assert not docker_swarm_service.has_list_changed(None, [1, 2])
|
||||
|
||||
assert not docker_swarm_service.has_list_changed([], None)
|
||||
assert not docker_swarm_service.has_list_changed([], [])
|
||||
assert docker_swarm_service.has_list_changed([], [1, 2])
|
||||
|
||||
assert docker_swarm_service.has_list_changed([1, 2], None)
|
||||
assert docker_swarm_service.has_list_changed([1, 2], [])
|
||||
|
||||
assert docker_swarm_service.has_list_changed([1, 2, 3], [1, 2])
|
||||
assert docker_swarm_service.has_list_changed([1, 2], [1, 2, 3])
|
||||
|
||||
# Check list sorting
|
||||
assert not docker_swarm_service.has_list_changed([1, 2], [2, 1])
|
||||
assert docker_swarm_service.has_list_changed([1, 2], [2, 1], sort_lists=False)
|
||||
|
||||
# Check type matching
|
||||
assert docker_swarm_service.has_list_changed([None, 1], [2, 1])
|
||||
assert docker_swarm_service.has_list_changed([2, 1], [None, 1])
|
||||
assert docker_swarm_service.has_list_changed(
|
||||
["command --with args"], ["command", "--with", "args"]
|
||||
)
|
||||
assert docker_swarm_service.has_list_changed(
|
||||
["sleep", "3400"], ["sleep", "3600"], sort_lists=False
|
||||
)
|
||||
|
||||
# List comparisons with dictionaries
|
||||
assert not docker_swarm_service.has_list_changed(
|
||||
[{"a": 1}], [{"a": 1}], sort_key="a"
|
||||
)
|
||||
|
||||
assert not docker_swarm_service.has_list_changed(
|
||||
[{"a": 1}, {"a": 2}], [{"a": 1}, {"a": 2}], sort_key="a"
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
docker_swarm_service.has_list_changed(
|
||||
[{"a": 1}, {"a": 2}], [{"a": 1}, {"a": 2}]
|
||||
)
|
||||
|
||||
# List sort checking with sort key
|
||||
assert not docker_swarm_service.has_list_changed(
|
||||
[{"a": 1}, {"a": 2}], [{"a": 2}, {"a": 1}], sort_key="a"
|
||||
)
|
||||
assert docker_swarm_service.has_list_changed(
|
||||
[{"a": 1}, {"a": 2}], [{"a": 2}, {"a": 1}], sort_lists=False
|
||||
)
|
||||
|
||||
assert docker_swarm_service.has_list_changed(
|
||||
[{"a": 1}, {"a": 2}, {"a": 3}], [{"a": 2}, {"a": 1}], sort_key="a"
|
||||
)
|
||||
assert docker_swarm_service.has_list_changed(
|
||||
[{"a": 1}, {"a": 2}], [{"a": 1}, {"a": 2}, {"a": 3}], sort_lists=False
|
||||
)
|
||||
|
||||
# Additional dictionary elements
|
||||
    assert not docker_swarm_service.has_list_changed(
        [
            {"src": 1, "dst": 2},
            {"src": 1, "dst": 2, "protocol": "udp"},
        ],
        [
            {"src": 1, "dst": 2, "protocol": "tcp"},
            {"src": 1, "dst": 2, "protocol": "udp"},
        ],
        sort_key="dst",
    )
    assert not docker_swarm_service.has_list_changed(
        [
            {"src": 1, "dst": 2, "protocol": "udp"},
            {"src": 1, "dst": 3, "protocol": "tcp"},
        ],
        [
            {"src": 1, "dst": 2, "protocol": "udp"},
            {"src": 1, "dst": 3, "protocol": "tcp"},
        ],
        sort_key="dst",
    )
    assert docker_swarm_service.has_list_changed(
        [
            {"src": 1, "dst": 2, "protocol": "udp"},
            {"src": 1, "dst": 2},
            {"src": 3, "dst": 4},
        ],
        [
            {"src": 1, "dst": 3, "protocol": "udp"},
            {"src": 1, "dst": 2, "protocol": "tcp"},
            {"src": 3, "dst": 4, "protocol": "tcp"},
        ],
        sort_key="dst",
    )
    assert docker_swarm_service.has_list_changed(
        [
            {"src": 1, "dst": 3, "protocol": "tcp"},
            {"src": 1, "dst": 2, "protocol": "udp"},
        ],
        [
            {"src": 1, "dst": 2, "protocol": "tcp"},
            {"src": 1, "dst": 2, "protocol": "udp"},
        ],
        sort_key="dst",
    )
    assert docker_swarm_service.has_list_changed(
        [
            {"src": 1, "dst": 2, "protocol": "udp"},
            {"src": 1, "dst": 2, "protocol": "tcp", "extra": {"test": "foo"}},
        ],
        [
            {"src": 1, "dst": 2, "protocol": "udp"},
            {"src": 1, "dst": 2, "protocol": "tcp"},
        ],
        sort_key="dst",
    )
    assert not docker_swarm_service.has_list_changed(
        [{"id": "123", "aliases": []}], [{"id": "123"}], sort_key="id"
    )


def test_have_networks_changed() -> None:
    assert not docker_swarm_service.have_networks_changed(None, None)

    assert not docker_swarm_service.have_networks_changed([], None)

    assert not docker_swarm_service.have_networks_changed([{"id": 1}], [{"id": 1}])

    assert docker_swarm_service.have_networks_changed(
        [{"id": 1}], [{"id": 1}, {"id": 2}]
    )

    assert not docker_swarm_service.have_networks_changed(
        [{"id": 1}, {"id": 2}], [{"id": 1}, {"id": 2}]
    )

    assert not docker_swarm_service.have_networks_changed(
        [{"id": 1}, {"id": 2}], [{"id": 2}, {"id": 1}]
    )

    assert not docker_swarm_service.have_networks_changed(
        [{"id": 1}, {"id": 2, "aliases": []}], [{"id": 1}, {"id": 2}]
    )

    assert docker_swarm_service.have_networks_changed(
        [{"id": 1}, {"id": 2, "aliases": ["alias1"]}], [{"id": 1}, {"id": 2}]
    )

    assert docker_swarm_service.have_networks_changed(
        [{"id": 1}, {"id": 2, "aliases": ["alias1", "alias2"]}],
        [{"id": 1}, {"id": 2, "aliases": ["alias1"]}],
    )

    assert not docker_swarm_service.have_networks_changed(
        [{"id": 1}, {"id": 2, "aliases": ["alias1", "alias2"]}],
        [{"id": 1}, {"id": 2, "aliases": ["alias1", "alias2"]}],
    )

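    # The order of aliases is expected not to matter.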
    assert not docker_swarm_service.have_networks_changed(
        [{"id": 1}, {"id": 2, "aliases": ["alias1", "alias2"]}],
        [{"id": 1}, {"id": 2, "aliases": ["alias2", "alias1"]}],
    )

    assert not docker_swarm_service.have_networks_changed(
        [{"id": 1, "options": {}}, {"id": 2, "aliases": ["alias1", "alias2"]}],
        [{"id": 1}, {"id": 2, "aliases": ["alias2", "alias1"]}],
    )

    assert not docker_swarm_service.have_networks_changed(
        [
            {"id": 1, "options": {"option1": "value1"}},
            {"id": 2, "aliases": ["alias1", "alias2"]},
        ],
        [
            {"id": 1, "options": {"option1": "value1"}},
            {"id": 2, "aliases": ["alias2", "alias1"]},
        ],
    )

    assert docker_swarm_service.have_networks_changed(
        [
            {"id": 1, "options": {"option1": "value1"}},
            {"id": 2, "aliases": ["alias1", "alias2"]},
        ],
        [
            {"id": 1, "options": {"option1": "value2"}},
            {"id": 2, "aliases": ["alias2", "alias1"]},
        ],
    )


def test_get_docker_networks() -> None:
    network_names = [
        "network_1",
        "network_2",
        "network_3",
        "network_4",
    ]
    networks: list[str | dict[str, t.Any]] = [
        network_names[0],
        {"name": network_names[1]},
        {"name": network_names[2], "aliases": ["networkalias1"]},
        {
            "name": network_names[3],
            "aliases": ["networkalias2"],
            "options": {"foo": "bar"},
        },
    ]
    network_ids = {
        network_names[0]: "1",
        network_names[1]: "2",
        network_names[2]: "3",
        network_names[3]: "4",
    }
    parsed_networks = docker_swarm_service.get_docker_networks(networks, network_ids)
    assert len(parsed_networks) == 4
    for i, network in enumerate(parsed_networks):
        assert "name" not in network
        assert "id" in network
        expected_name = network_names[i]
        assert network["id"] == network_ids[expected_name]
        if i == 2:
            assert network["aliases"] == ["networkalias1"]
        if i == 3:
            assert network["aliases"] == ["networkalias2"]
            assert "foo" in network["options"]
    # Test missing name
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks([{"invalid": "err"}], {"err": "x"})
    # Test for invalid aliases type
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks(
            [{"name": "test", "aliases": 1}], {"test": "x"}
        )
    # Test invalid aliases elements
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks(
            [{"name": "test", "aliases": [1]}], {"test": "x"}
        )
    # Test for invalid options type
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks(
            [{"name": "test", "options": 1}], {"test": "x"}
        )
    # Test for non-existing networks
    with pytest.raises(ValueError):
        docker_swarm_service.get_docker_networks(
            [{"name": "idontexist"}], {"test": "x"}
        )
    # Test empty values
    assert docker_swarm_service.get_docker_networks([], {}) == []
    assert docker_swarm_service.get_docker_networks(None, {}) is None
    # Test invalid options
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks(
            [{"name": "test", "nonexisting_option": "foo"}], {"test": "1"}
        )
@@ -0,0 +1,169 @@
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import annotations

import typing as t

import pytest
from ansible_collections.community.internal_test_tools.tests.unit.utils.trust import (
    SUPPORTS_DATA_TAGGING,
)
from ansible_collections.community.internal_test_tools.tests.unit.utils.trust import (
    is_trusted as _is_trusted,
)
from ansible_collections.community.internal_test_tools.tests.unit.utils.trust import (
    make_trusted as _make_trusted,
)
from ansible_collections.community.internal_test_tools.tests.unit.utils.trust import (
    make_untrusted as _make_untrusted,
)

from ansible_collections.community.docker.plugins.plugin_utils._unsafe import (
    make_unsafe,
)

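# Each entry is (value, paths expected to be untrusted after make_unsafe(),
# paths expected to stay trusted); a path is a tuple of keys/indices leading
# into the value, and the empty tuple denotes the value itself.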
TEST_MAKE_UNSAFE: list[
    tuple[t.Any, list[tuple[t.Any, ...]], list[tuple[t.Any, ...]]]
] = [
    (
        _make_trusted("text"),
        [],
        [
            (),
        ],
    ),
    (
        _make_trusted("{{text}}"),
        [
            (),
        ],
        [],
    ),
    (
        {
            _make_trusted("skey"): _make_trusted("value"),
            _make_trusted("ukey"): _make_trusted("{{value}}"),
            1: [
                _make_trusted("value"),
                _make_trusted("{{value}}"),
                {
                    1.0: _make_trusted("{{value}}"),
                    2.0: _make_trusted("value"),
                },
            ],
        },
        [
            ("ukey",),
            (1, 1),
            (1, 2, 1.0),
        ],
        [
            ("skey",),
            (1, 0),
            (1, 2, 2.0),
        ],
    ),
    (
        [_make_trusted("value"), _make_trusted("{{value}}")],
        [
            (1,),
        ],
        [
            (0,),
        ],
    ),
]

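# bytes values only carry trust information on ansible-core versions
# without Data Tagging, so they are only exercised there.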
if not SUPPORTS_DATA_TAGGING:
    TEST_MAKE_UNSAFE.extend(
        [
            (
                _make_trusted(b"text"),
                [],
                [
                    (),
                ],
            ),
            (
                _make_trusted(b"{{text}}"),
                [
                    (),
                ],
                [],
            ),
        ]
    )


@pytest.mark.parametrize(
    "value, check_unsafe_paths, check_safe_paths", TEST_MAKE_UNSAFE
)
def test_make_unsafe(
    value: t.Any,
    check_unsafe_paths: list[tuple[t.Any, ...]],
    check_safe_paths: list[tuple[t.Any, ...]],
) -> None:
    unsafe_value = make_unsafe(value)
    assert unsafe_value == value
    for check_path in check_unsafe_paths:
        obj = unsafe_value
        for elt in check_path:
            obj = obj[elt]
        assert not _is_trusted(obj)
    for check_path in check_safe_paths:
        obj = unsafe_value
        for elt in check_path:
            obj = obj[elt]
        assert _is_trusted(obj)

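# make_unsafe() is expected to return None and already untrusted values
# as-is, and a new object for values that were trusted.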
def test_make_unsafe_idempotence() -> None:
    assert make_unsafe(None) is None

    unsafe_str = _make_untrusted("{{test}}")
    assert id(make_unsafe(unsafe_str)) == id(unsafe_str)

    safe_str = _make_trusted("{{test}}")
    assert id(make_unsafe(safe_str)) != id(safe_str)


def test_make_unsafe_dict_key() -> None:
    value: dict[t.Any, t.Any] = {
        _make_trusted("test"): 2,
    }
    if not SUPPORTS_DATA_TAGGING:
        value[_make_trusted(b"test")] = 1
    unsafe_value = make_unsafe(value)
    assert unsafe_value == value
    for obj in unsafe_value:
        assert _is_trusted(obj)

    value = {
        _make_trusted("{{test}}"): 2,
    }
    if not SUPPORTS_DATA_TAGGING:
        value[_make_trusted(b"{{test}}")] = 1
    unsafe_value = make_unsafe(value)
    assert unsafe_value == value
    for obj in unsafe_value:
        assert not _is_trusted(obj)


def test_make_unsafe_set() -> None:
    value: set[t.Any] = set([_make_trusted("test")])
    if not SUPPORTS_DATA_TAGGING:
        value.add(_make_trusted(b"test"))
    unsafe_value = make_unsafe(value)
    assert unsafe_value == value
    for obj in unsafe_value:
        assert _is_trusted(obj)

    value = set([_make_trusted("{{test}}")])
    if not SUPPORTS_DATA_TAGGING:
        value.add(_make_trusted(b"{{test}}"))
    unsafe_value = make_unsafe(value)
    assert unsafe_value == value
    for obj in unsafe_value:
        assert not _is_trusted(obj)
@@ -0,0 +1,64 @@
# Copyright 2022 Red Hat | Ansible
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import annotations

import json
import tarfile
import typing as t
from tempfile import TemporaryFile


def write_imitation_archive(
    file_name: str, image_id: str, repo_tags: list[str]
) -> None:
    """
    Write a tar file meeting these requirements:

    * Has a manifest.json file
    * manifest.json contains a one-element array
    * The element has a Config property whose value is "[image_id].json"

    :param file_name: Name of the file to create
    :type file_name: str
    :param image_id: Fake sha256 hash (without the sha256: prefix)
    :type image_id: str
    :param repo_tags: List of fake image tags
    :type repo_tags: list
    """

    manifest = [{"Config": f"{image_id}.json", "RepoTags": repo_tags}]

    write_imitation_archive_with_manifest(file_name, manifest)


def write_imitation_archive_with_manifest(
    file_name: str, manifest: list[dict[str, t.Any]]
) -> None:
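    # Write the manifest to a scratch file first so the tar entry's size
    # is known (via tell()) before streaming it into the archive.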
    with tarfile.open(file_name, "w") as tf, TemporaryFile() as f:
        f.write(json.dumps(manifest).encode("utf-8"))

        ti = tarfile.TarInfo("manifest.json")
        ti.size = f.tell()

        f.seek(0)
        tf.addfile(ti, f)


def write_irrelevant_tar(file_name: str) -> None:
    """
    Create a tar file that does not match the spec for "docker image save" / "docker image load" commands.

    :param file_name: Name of tar file to create
    :type file_name: str
    """

    with tarfile.open(file_name, "w") as tf, TemporaryFile() as f:
        f.write("Hello, world.".encode("utf-8"))

        ti = tarfile.TarInfo("hi.txt")
        ti.size = f.tell()

        f.seek(0)
        tf.addfile(ti, f)
@@ -0,0 +1,6 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

docker
requests
@@ -0,0 +1,8 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

collections:
  - community.internal_test_tools
  - community.library_inventory_filtering_v1