2017-02-14 01:39:08 +01:00
|
|
|
import contextlib
|
2016-12-10 00:53:23 +01:00
|
|
|
import logging
|
|
|
|
import os
|
2024-12-30 11:59:07 +01:00
|
|
|
import pathlib
|
2021-05-10 22:35:53 +01:00
|
|
|
import re
|
2016-12-10 00:53:23 +01:00
|
|
|
import shlex
|
|
|
|
import socket
|
|
|
|
import subprocess
|
|
|
|
import time
|
2024-12-30 11:59:07 +01:00
|
|
|
from io import StringIO
|
2024-12-27 21:36:07 +01:00
|
|
|
from typing import Iterator, List, Optional
|
2016-12-10 00:53:23 +01:00
|
|
|
|
|
|
|
import backoff
|
2024-12-24 13:53:09 +01:00
|
|
|
import docker.errors
|
2016-12-10 00:53:23 +01:00
|
|
|
import pytest
|
|
|
|
import requests
|
2024-12-26 16:12:10 +01:00
|
|
|
from _pytest.fixtures import FixtureRequest
|
|
|
|
from docker import DockerClient
|
2021-05-10 22:35:53 +01:00
|
|
|
from docker.models.containers import Container
|
2024-12-26 16:12:10 +01:00
|
|
|
from docker.models.networks import Network
|
|
|
|
from packaging.version import Version
|
|
|
|
from requests import Response
|
2024-12-26 01:13:29 +01:00
|
|
|
from urllib3.util.connection import HAS_IPV6
|
2016-12-10 00:53:23 +01:00
|
|
|
|
2017-01-27 03:31:33 +01:00
|
|
|
logging.basicConfig(level=logging.INFO)
|
2016-12-10 00:53:23 +01:00
|
|
|
logging.getLogger('backoff').setLevel(logging.INFO)
|
2017-02-14 01:39:08 +01:00
|
|
|
logging.getLogger('DNS').setLevel(logging.DEBUG)
|
2017-02-11 00:02:42 +01:00
|
|
|
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN)
|
2016-12-10 00:53:23 +01:00
|
|
|
|
2024-12-30 11:59:07 +01:00
|
|
|
CA_ROOT_CERTIFICATE = pathlib.Path(__file__).parent.joinpath("certs/ca-root.crt")
|
2021-12-31 20:43:02 +13:00
|
|
|
PYTEST_RUNNING_IN_CONTAINER = os.environ.get('PYTEST_RUNNING_IN_CONTAINER') == "1"
|
2017-02-14 01:39:08 +01:00
|
|
|
FORCE_CONTAINER_IPV6 = False # ugly global state to consider containers' IPv6 address instead of IPv4
|
|
|
|
|
2023-12-19 21:07:30 +01:00
|
|
|
DOCKER_COMPOSE = os.environ.get('DOCKER_COMPOSE', 'docker compose')
|
2016-12-10 00:53:23 +01:00
|
|
|
|
2017-02-11 00:02:42 +01:00
|
|
|
docker_client = docker.from_env()
|
2021-12-31 22:12:25 +13:00
|
|
|
|
|
|
|
# Name of pytest container to reference if it's being used for running tests
|
|
|
|
test_container = 'nginx-proxy-pytest'
|
2017-02-11 00:02:42 +01:00
|
|
|
|
|
|
|
|
2016-12-10 00:53:23 +01:00
|
|
|
###############################################################################
|
2022-01-01 01:39:51 +13:00
|
|
|
#
|
2016-12-10 00:53:23 +01:00
|
|
|
# utilities
|
2022-01-01 01:39:51 +13:00
|
|
|
#
|
2016-12-10 00:53:23 +01:00
|
|
|
###############################################################################
|
|
|
|
|
2024-12-24 13:53:09 +01:00
|
|
|
|
2017-02-14 01:39:08 +01:00
|
|
|
@contextlib.contextmanager
def ipv6(force_ipv6: bool = True):
    """
    Meant to be used as a context manager to force IPv6 sockets:

        with ipv6():
            nginxproxy.get("http://something.nginx-proxy.example")  # force use of IPv6

        with ipv6(False):
            nginxproxy.get("http://something.nginx-proxy.example")  # legacy behavior

    :param force_ipv6: value assigned to the global FORCE_CONTAINER_IPV6 flag
        for the duration of the `with` block.
    """
    global FORCE_CONTAINER_IPV6
    FORCE_CONTAINER_IPV6 = force_ipv6
    try:
        yield
    finally:
        # Bug fix: always restore the default, even if the body raised, so a
        # failing request cannot leak forced-IPv6 state into later tests.
        FORCE_CONTAINER_IPV6 = False
|
|
|
|
|
2017-01-24 22:42:13 +01:00
|
|
|
|
2024-12-26 16:12:10 +01:00
|
|
|
class RequestsForDocker:
    """
    Proxy for calling methods of the requests module.

    When an HTTP response failed due to HTTP Error 404 or 502, retry a few times.
    Provides method `get_conf` to extract the nginx-proxy configuration content.

    All HTTP verb methods accept an extra `ipv6` keyword argument (default
    False) that forces the request to be made over IPv6.
    """
    def __init__(self):
        self.session = requests.Session()
        # Trust the test suite's own CA so HTTPS requests against test vhosts verify.
        if CA_ROOT_CERTIFICATE.is_file():
            self.session.verify = CA_ROOT_CERTIFICATE.as_posix()

    @staticmethod
    def get_nginx_proxy_container() -> Container:
        """
        Return the single running nginxproxy/nginx-proxy:test container.

        Fails the current test (without traceback) when zero or more than one
        matching container is running.
        """
        nginx_proxy_containers = docker_client.containers.list(filters={"ancestor": "nginxproxy/nginx-proxy:test"})
        if len(nginx_proxy_containers) > 1:
            pytest.fail("Too many running nginxproxy/nginx-proxy:test containers", pytrace=False)
        elif len(nginx_proxy_containers) == 0:
            pytest.fail("No running nginxproxy/nginx-proxy:test container", pytrace=False)
        return nginx_proxy_containers.pop()

    def get_conf(self) -> bytes:
        """
        Return the nginx config file
        """
        nginx_proxy_container = self.get_nginx_proxy_container()
        return get_nginx_conf_from_container(nginx_proxy_container)

    def get_ip(self) -> str:
        """
        Return the nginx container ip address
        """
        nginx_proxy_container = self.get_nginx_proxy_container()
        return container_ip(nginx_proxy_container)

    def _request_with_retries(self, method_name: str, *args, **kwargs) -> Response:
        """
        Issue an HTTP request through the session, retrying while the response
        status code is 404 or 502 (which mostly indicates nginx is reloading).

        Shared implementation for get/post/put/head/delete/options, which were
        previously six identical copies of this logic.
        """
        with ipv6(kwargs.pop('ipv6', False)):
            @backoff.on_predicate(backoff.constant, lambda r: r.status_code in (404, 502), interval=.3, max_tries=30, jitter=None)
            def _request(*_args, **_kwargs):
                return getattr(self.session, method_name)(*_args, **_kwargs)
            return _request(*args, **kwargs)

    def get(self, *args, **kwargs) -> Response:
        return self._request_with_retries('get', *args, **kwargs)

    def post(self, *args, **kwargs) -> Response:
        return self._request_with_retries('post', *args, **kwargs)

    def put(self, *args, **kwargs) -> Response:
        return self._request_with_retries('put', *args, **kwargs)

    def head(self, *args, **kwargs) -> Response:
        return self._request_with_retries('head', *args, **kwargs)

    def delete(self, *args, **kwargs) -> Response:
        return self._request_with_retries('delete', *args, **kwargs)

    def options(self, *args, **kwargs) -> Response:
        return self._request_with_retries('options', *args, **kwargs)

    def __getattr__(self, name):
        # Fall back to the plain requests module for anything not overridden here.
        return getattr(requests, name)
|
|
|
|
|
|
|
|
|
2024-12-26 16:12:10 +01:00
|
|
|
def container_ip(container: Container) -> str:
    """
    return the IP address of a container.

    If the global FORCE_CONTAINER_IPV6 flag is set, return the IPv6 address.
    Skips the current test when IPv6 is forced but unavailable (on the system
    or on the container).
    """
    # NOTE(review): `global` is only required for assignment; this function only
    # reads the flag, so the declaration is redundant (but harmless).
    global FORCE_CONTAINER_IPV6
    if FORCE_CONTAINER_IPV6:
        # HAS_IPV6 comes from urllib3: no point returning an IPv6 address the
        # client stack cannot use.
        if not HAS_IPV6:
            pytest.skip("This system does not support IPv6")
        ip = container_ipv6(container)
        if ip == '':
            pytest.skip(f"Container {container.name} has no IPv6 address")
        else:
            return ip
    else:
        net_info = container.attrs["NetworkSettings"]["Networks"]
        # Default bridge network takes precedence when present.
        if "bridge" in net_info:
            return net_info["bridge"]["IPAddress"]

        # container is running in host network mode
        if "host" in net_info:
            return "127.0.0.1"

        # not default bridge network, fallback on first network defined
        network_name = list(net_info.keys())[0]
        return net_info[network_name]["IPAddress"]
|
|
|
|
|
|
|
|
|
2024-12-26 16:12:10 +01:00
|
|
|
def container_ipv6(container: Container) -> str:
    """
    Return the IPv6 address of a container.
    """
    networks = container.attrs["NetworkSettings"]["Networks"]

    # Default bridge network takes precedence when present.
    if "bridge" in networks:
        return networks["bridge"]["GlobalIPv6Address"]

    # container is running in host network mode
    if "host" in networks:
        return "::1"

    # not default bridge network, fallback on first network defined
    first_network = next(iter(networks))
    return networks[first_network]["GlobalIPv6Address"]
|
2017-02-11 22:41:09 +01:00
|
|
|
|
|
|
|
|
2024-12-26 16:12:10 +01:00
|
|
|
def nginx_proxy_dns_resolver(domain_name: str) -> Optional[str]:
    """
    If "nginx-proxy" is found in the domain name, return the IP address of the
    docker container issued from the docker image nginxproxy/nginx-proxy:test.

    :return: IP or None
    """
    log = logging.getLogger('DNS')
    log.debug(f"nginx_proxy_dns_resolver({domain_name!r})")
    if 'nginx-proxy' in domain_name:
        nginxproxy_containers = docker_client.containers.list(filters={"status": "running", "ancestor": "nginxproxy/nginx-proxy:test"})
        if len(nginxproxy_containers) == 0:
            log.warning(f"no container found from image nginxproxy/nginx-proxy:test while resolving {domain_name!r}")
            # Surface the logs of an exited nginx-proxy container to help
            # diagnose why no running instance could be found.
            exited_nginxproxy_containers = docker_client.containers.list(filters={"status": "exited", "ancestor": "nginxproxy/nginx-proxy:test"})
            if len(exited_nginxproxy_containers) > 0:
                exited_nginxproxy_container_logs = exited_nginxproxy_containers[0].logs()
                log.warning(f"nginxproxy/nginx-proxy:test container might have exited unexpectedly. Container logs: " + "\n" + exited_nginxproxy_container_logs.decode())
            return None
        nginxproxy_container = nginxproxy_containers[0]
        ip = container_ip(nginxproxy_container)
        log.info(f"resolving domain name {domain_name!r} as IP address {ip} of nginx-proxy container {nginxproxy_container.name}")
        return ip
    # Implicitly returns None when the domain name does not mention nginx-proxy.
|
|
|
|
|
2024-12-26 16:12:10 +01:00
|
|
|
def docker_container_dns_resolver(domain_name: str) -> Optional[str]:
    """
    if domain name is of the form "XXX.container.docker" or "anything.XXX.container.docker", return the ip address of the docker container
    named XXX.

    :return: IP or None
    """
    log = logging.getLogger('DNS')
    log.debug(f"docker_container_dns_resolver({domain_name!r})")

    # The container name is the label immediately before ".container.docker".
    matched = re.search(r'(^|.+\.)(?P<container>[^.]+)\.container\.docker$', domain_name)
    if matched is None:
        log.debug(f"{domain_name!r} does not match")
        return None

    container_name = matched.group('container')
    log.debug(f"looking for container {container_name!r}")
    try:
        target_container = docker_client.containers.get(container_name)
    except docker.errors.NotFound:
        log.warning(f"container named {container_name!r} not found while resolving {domain_name!r}")
        return None
    log.debug(f"container {target_container.name!r} found ({target_container.short_id})")

    ip = container_ip(target_container)
    log.info(f"resolving domain name {domain_name!r} as IP address {ip} of container {target_container.name}")
    return ip
|
2017-02-11 22:41:09 +01:00
|
|
|
|
|
|
|
|
2016-12-10 00:53:23 +01:00
|
|
|
def monkey_patch_urllib_dns_resolver():
    """
    Alter the behavior of the urllib DNS resolver so that any domain name
    containing substring 'nginx-proxy' will resolve to the IP address
    of the container created from image 'nginxproxy/nginx-proxy:test'.

    Returns the original socket.getaddrinfo so callers can restore it later
    (see restore_urllib_dns_resolver).
    """
    prv_getaddrinfo = socket.getaddrinfo
    # Memoizes resolutions per argument tuple for the lifetime of the patch.
    dns_cache = {}
    def new_getaddrinfo(*args):
        logging.getLogger('DNS').debug(f"resolving domain name {repr(args)}")
        _args = list(args)

        # Fail early when querying IP directly, and it is forced ipv6 when not supported,
        # Otherwise a pytest container not using the host network fails to pass `test_raw-ip-vhost`.
        if FORCE_CONTAINER_IPV6 and not HAS_IPV6:
            pytest.skip("This system does not support IPv6")

        # custom DNS resolvers: nginx-proxy containers first, then generic
        # "*.container.docker" names.
        ip = nginx_proxy_dns_resolver(args[0])
        if ip is None:
            ip = docker_container_dns_resolver(args[0])
        if ip is not None:
            _args[0] = ip

        # call on original DNS resolver, with eventually the original host changed to the wanted IP address
        try:
            return dns_cache[tuple(_args)]
        except KeyError:
            res = prv_getaddrinfo(*_args)
            dns_cache[tuple(_args)] = res
            return res
    socket.getaddrinfo = new_getaddrinfo
    return prv_getaddrinfo
|
|
|
|
|
2024-12-24 16:23:09 +01:00
|
|
|
|
2016-12-10 00:53:23 +01:00
|
|
|
def restore_urllib_dns_resolver(getaddrinfo_func):
    """
    Install the given function as the global socket.getaddrinfo implementation.

    Intended to undo monkey_patch_urllib_dns_resolver() by passing back the
    original resolver that it returned.
    """
    socket.getaddrinfo = getaddrinfo_func
|
|
|
|
|
|
|
|
|
2024-12-26 16:12:10 +01:00
|
|
|
def get_nginx_conf_from_container(container: Container) -> bytes:
    """
    return the nginx /etc/nginx/conf.d/default.conf file content from a container
    """
    import tarfile
    from io import BytesIO

    # get_archive yields the file wrapped in a tar stream.
    archive_stream, _stat = container.get_archive('/etc/nginx/conf.d/default.conf')
    archive_buffer = BytesIO(b"".join(archive_stream))

    with tarfile.open(fileobj=archive_buffer) as archive:
        return archive.extractfile('default.conf').read()
|
2016-12-10 00:53:23 +01:00
|
|
|
|
|
|
|
|
2024-12-30 11:59:07 +01:00
|
|
|
def __prepare_and_execute_compose_cmd(compose_files: List[str], project_name: str, cmd: str):
    """
    Prepare and execute the Docker Compose command with the provided compose files and project name.
    """
    # Assemble "docker compose --project-name <p> --file <f1> [--file <f2> ...] <cmd>".
    command_parts = [DOCKER_COMPOSE, f"--project-name {project_name}"]
    command_parts.extend(f"--file {compose_file}" for compose_file in compose_files)
    command_parts.append(cmd)
    command_line = " ".join(command_parts)

    logging.info(command_line)
    try:
        subprocess.check_output(shlex.split(command_line), stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        pytest.fail(f"Error while running '{command_line}':\n{e.output}", pytrace=False)
|
2017-02-14 22:30:20 +01:00
|
|
|
|
2016-12-10 00:53:23 +01:00
|
|
|
|
2024-12-30 11:59:07 +01:00
|
|
|
def docker_compose_up(compose_files: List[str], project_name: str):
    """
    Execute compose up --detach with the provided compose files and project name.

    Fails the current test when no compose file is provided.
    """
    # Idiomatic emptiness check; the message was previously a placeholder-less f-string.
    if not compose_files:
        pytest.fail("No compose file passed to docker_compose_up", pytrace=False)
    __prepare_and_execute_compose_cmd(compose_files, project_name, cmd="up --detach")
|
|
|
|
|
|
|
|
|
|
|
|
def docker_compose_down(compose_files: List[str], project_name: str):
    """
    Execute compose down --volumes with the provided compose files and project name.

    Fails the current test when no compose file is provided.
    """
    if not compose_files:
        # Bug fix: the error message previously named docker_compose_up.
        pytest.fail("No compose file passed to docker_compose_down", pytrace=False)
    __prepare_and_execute_compose_cmd(compose_files, project_name, cmd="down --volumes")
|
2017-02-14 22:30:20 +01:00
|
|
|
|
2017-01-28 11:25:54 +01:00
|
|
|
|
2016-12-10 00:53:23 +01:00
|
|
|
def wait_for_nginxproxy_to_be_ready():
    """
    If one (and only one) container started from image nginxproxy/nginx-proxy:test is found,
    wait for its log to contain substring "Watching docker events"
    """
    candidates = docker_client.containers.list(filters={"ancestor": "nginxproxy/nginx-proxy:test"})
    if len(candidates) != 1:
        return
    # Stream the container log until the readiness marker shows up.
    for log_line in candidates[0].logs(stream=True):
        if b"Watching docker events" in log_line:
            logging.debug("nginx-proxy ready")
            break
|
|
|
|
|
|
|
|
|
2023-02-05 19:56:14 -05:00
|
|
|
@pytest.fixture
def docker_compose_files(request: FixtureRequest) -> List[str]:
    """Fixture returning the docker compose files to consider:

    If a YAML file exists with the same name as the test module (with the `.py` extension
    replaced with `.base.yml`, ie `test_foo.py`-> `test_foo.base.yml`) and in the same
    directory as the test module, use only that file.

    Otherwise, merge the following files in this order:

    - the `compose.base.yml` file in the parent `test` directory.
    - if present in the same directory as the test module, the `compose.base.override.yml` file.
    - the YAML file named after the current test module (ie `test_foo.py`-> `test_foo.yml`)

    Tests can override this fixture to specify a custom location.
    """
    compose_files: List[str] = []
    test_module_path = pathlib.Path(request.module.__file__).parent

    # A module-specific base file replaces everything else.
    module_base_file = test_module_path.joinpath(f"{request.module.__name__}.base.yml")
    if module_base_file.is_file():
        return [module_base_file.as_posix()]

    global_base_file = test_module_path.parent.joinpath("compose.base.yml")
    if global_base_file.is_file():
        compose_files.append(global_base_file.as_posix())

    module_base_override_file = test_module_path.joinpath("compose.base.override.yml")
    if module_base_override_file.is_file():
        compose_files.append(module_base_override_file.as_posix())

    module_compose_file = test_module_path.joinpath(f"{request.module.__name__}.yml")
    if module_compose_file.is_file():
        compose_files.append(module_compose_file.as_posix())

    # Log (but do not fail) when neither module-specific file exists.
    if not module_base_file.is_file() and not module_compose_file.is_file():
        logging.error(
            f"Could not find any docker compose file named '{module_base_file.name}' or '{module_compose_file.name}'"
        )

    logging.debug(f"using docker compose files {compose_files}")
    return compose_files
|
2016-12-10 00:53:23 +01:00
|
|
|
|
2024-12-26 16:12:10 +01:00
|
|
|
def connect_to_network(network: Network) -> Optional[Network]:
    """
    If we are running from a container, connect our container to the given network

    :return: the name of the network we were connected to, or None
    """
    if not PYTEST_RUNNING_IN_CONTAINER:
        return None
    try:
        my_container = docker_client.containers.get(test_container)
    except docker.errors.NotFound:
        logging.warning(f"container {test_container} not found")
        return None

    # figure out our container networks
    my_networks = list(my_container.attrs["NetworkSettings"]["Networks"].keys())

    # If the pytest container is using host networking, it cannot connect to container networks (not required with host network)
    if 'host' in my_networks:
        return None

    # Make sure our container is connected to the nginx-proxy's network,
    # but avoid connecting to `none` network (not valid) with `test_server-down` tests
    if network.name != 'none' and network.name not in my_networks:
        logging.info(f"Connecting to docker network: {network.name}")
        network.connect(my_container)
    return network
|
2017-01-28 11:25:54 +01:00
|
|
|
|
|
|
|
|
2024-12-26 16:12:10 +01:00
|
|
|
def disconnect_from_network(network: Network = None):
    """
    If we are running from a container, disconnect our container from the given network.

    :param network: name of a docker network to disconnect from
    """
    if not PYTEST_RUNNING_IN_CONTAINER or network is None:
        return
    try:
        my_container = docker_client.containers.get(test_container)
    except docker.errors.NotFound:
        logging.warning(f"container {test_container} not found")
        return

    # figure out our container networks
    my_networks_names = list(my_container.attrs["NetworkSettings"]["Networks"].keys())

    # disconnect our container from the given network
    if network.name in my_networks_names:
        logging.info(f"Disconnecting from network {network.name}")
        network.disconnect(my_container)
|
2017-01-28 11:25:54 +01:00
|
|
|
|
|
|
|
|
2024-12-26 16:12:10 +01:00
|
|
|
def connect_to_all_networks() -> List[Network]:
    """
    If we are running from a container, connect our container to all current docker networks.

    :return: a list of networks we connected to
    """
    if not PYTEST_RUNNING_IN_CONTAINER:
        return []
    # find the list of docker networks, skipping the default bridge and any
    # network with no containers attached
    candidate_networks = docker_client.networks.list(greedy=True)
    populated = [n for n in candidate_networks if len(n.containers) > 0 and n.name != 'bridge']
    return [connect_to_network(n) for n in populated]
|
2017-02-11 00:02:42 +01:00
|
|
|
|
|
|
|
|
2023-02-05 19:38:55 -05:00
|
|
|
class DockerComposer(contextlib.AbstractContextManager):
    """
    Context manager keeping at most one docker compose project up at a time.

    `compose()` brings a project up (tearing down any previous one first);
    exiting the context manager tears everything down, including the docker
    networks the pytest container was attached to.
    """
    def __init__(self):
        self._networks = None
        self._docker_compose_files = None
        self._project_name = None

    def __exit__(self, *exc_info):
        self._down()

    def _down(self):
        # Nothing to tear down when no project is currently up.
        if self._docker_compose_files is None:
            return
        for network in self._networks:
            disconnect_from_network(network)
        docker_compose_down(self._docker_compose_files, self._project_name)
        # Bug fix: this previously assigned the misspelled attribute
        # `self._docker_compose_file`, so the tracked compose files were never
        # reset and a later compose() with the same files would wrongly no-op.
        self._docker_compose_files = None
        self._project_name = None

    def compose(self, docker_compose_files: List[str], project_name: str):
        """
        (Re)start the given compose project, unless it is already the one running.
        """
        if docker_compose_files == self._docker_compose_files and project_name == self._project_name:
            return
        self._down()
        if docker_compose_files is None or project_name is None:
            return
        docker_compose_up(docker_compose_files, project_name)
        self._networks = connect_to_all_networks()
        wait_for_nginxproxy_to_be_ready()
        time.sleep(3)  # give time to containers to be ready
        self._docker_compose_files = docker_compose_files
        self._project_name = project_name
|
2023-02-05 19:38:55 -05:00
|
|
|
|
|
|
|
|
2016-12-10 00:53:23 +01:00
|
|
|
###############################################################################
|
2022-01-01 01:39:51 +13:00
|
|
|
#
|
2016-12-10 00:53:23 +01:00
|
|
|
# Py.test fixtures
|
2022-01-01 01:39:51 +13:00
|
|
|
#
|
2016-12-10 00:53:23 +01:00
|
|
|
###############################################################################
|
|
|
|
|
2023-02-05 02:44:12 -05:00
|
|
|
|
2023-02-05 19:38:55 -05:00
|
|
|
@pytest.fixture(scope="module")
def docker_composer() -> Iterator[DockerComposer]:
    """
    Module-scoped fixture providing a DockerComposer instance; the compose
    project it manages is torn down when the module's tests are done.
    """
    with DockerComposer() as d:
        yield d
|
|
|
|
|
|
|
|
|
2023-02-05 02:44:12 -05:00
|
|
|
@pytest.fixture
def ca_root_certificate() -> Iterator[pathlib.Path]:
    """
    Provide the path to the test suite's CA root certificate.

    Annotation fixed: CA_ROOT_CERTIFICATE is a pathlib.Path, not a str.
    """
    yield CA_ROOT_CERTIFICATE
|
2023-02-05 02:44:12 -05:00
|
|
|
|
|
|
|
|
2023-02-05 19:56:14 -05:00
|
|
|
@pytest.fixture
def monkey_patched_dns():
    """
    Patch the urllib DNS resolver for the duration of the test, restoring
    the original socket.getaddrinfo afterwards.
    """
    original_dns_resolver = monkey_patch_urllib_dns_resolver()
    yield
    restore_urllib_dns_resolver(original_dns_resolver)
|
|
|
|
|
|
|
|
|
2023-02-05 19:56:14 -05:00
|
|
|
@pytest.fixture
def docker_compose(
    request: FixtureRequest,
    monkeypatch,
    monkey_patched_dns,
    docker_composer,
    docker_compose_files
) -> Iterator[DockerClient]:
    """
    Ensures containers necessary for the test module are started in a compose project,
    and set the environment variable `PYTEST_MODULE_PATH` to the test module's parent folder.

    A list of custom docker compose files path can be specified by overriding
    the `docker_compose_files` fixture.

    Also, in the case where pytest is running from a docker container, this fixture
    makes sure our container will be attached to all the docker networks.
    """
    pytest_module_path = pathlib.Path(request.module.__file__).parent
    monkeypatch.setenv("PYTEST_MODULE_PATH", pytest_module_path.as_posix())

    # One compose project per test module, named after the module.
    project_name = request.module.__name__
    docker_composer.compose(docker_compose_files, project_name)

    yield docker_client
|
2016-12-10 00:53:23 +01:00
|
|
|
|
|
|
|
|
2024-12-27 21:36:07 +01:00
|
|
|
@pytest.fixture
def nginxproxy() -> Iterator[RequestsForDocker]:
    """
    Provides the `nginxproxy` object that can be used in the same way the requests module is:

        r = nginxproxy.get("https://foo.com")

    The difference is that in case an HTTP requests has status code 404 or 502 (which mostly
    indicates that nginx has just reloaded), we retry up to 30 times the query.

    Also, the nginxproxy methods accept an additional keyword parameter: `ipv6` which forces requests
    made against containers to use the containers IPv6 address when set to `True`. If IPv6 is not
    supported by the system or docker, that particular test will be skipped.
    """
    # A fresh session (and therefore fresh connection pool) per test.
    yield RequestsForDocker()
|
2016-12-10 00:53:23 +01:00
|
|
|
|
|
|
|
|
2024-12-27 21:36:07 +01:00
|
|
|
@pytest.fixture
def acme_challenge_path() -> Iterator[str]:
    """
    Provides fake Let's Encrypt ACME challenge path used in certain tests
    """
    yield ".well-known/acme-challenge/test-filename"
|
2024-05-04 23:52:57 +03:00
|
|
|
|
2016-12-10 00:53:23 +01:00
|
|
|
###############################################################################
|
2022-01-01 01:39:51 +13:00
|
|
|
#
|
2016-12-10 00:53:23 +01:00
|
|
|
# Py.test hooks
|
2022-01-01 01:39:51 +13:00
|
|
|
#
|
2016-12-10 00:53:23 +01:00
|
|
|
###############################################################################
|
|
|
|
|
2024-12-24 16:23:09 +01:00
|
|
|
# pytest hook to display additional stuff in test report
|
2016-12-10 00:53:23 +01:00
|
|
|
def pytest_runtest_logreport(report):
    # On failure, attach the logs and generated nginx configuration of every
    # nginx-proxy test container (running or not) to the test report.
    if report.failed:
        test_containers = docker_client.containers.list(all=True, filters={"ancestor": "nginxproxy/nginx-proxy:test"})
        for container in test_containers:
            report.longrepr.addsection('nginx-proxy logs', container.logs().decode())
            report.longrepr.addsection('nginx-proxy conf', get_nginx_conf_from_container(container).decode())
|
2016-12-10 00:53:23 +01:00
|
|
|
|
2017-02-11 02:23:07 +01:00
|
|
|
|
2017-02-21 01:04:50 +01:00
|
|
|
# Py.test `incremental` marker, see http://stackoverflow.com/a/12579625/107049
|
|
|
|
def pytest_runtest_makereport(item, call):
    # Remember the first failing test of an `incremental` class so that
    # pytest_runtest_setup can xfail the remaining tests of that class.
    if "incremental" in item.keywords:
        if call.excinfo is not None:
            parent = item.parent
            parent._previousfailed = item
|
|
|
|
|
|
|
|
|
|
|
|
def pytest_runtest_setup(item):
    # xfail tests that follow a failure in the same `incremental` test class
    # (see pytest_runtest_makereport above for where the marker is set).
    previousfailed = getattr(item.parent, "_previousfailed", None)
    if previousfailed is not None:
        pytest.xfail(f"previous test failed ({previousfailed.name})")
|
2017-02-11 02:23:07 +01:00
|
|
|
|
|
|
|
###############################################################################
|
2022-01-01 01:39:51 +13:00
|
|
|
#
|
2017-02-11 02:23:07 +01:00
|
|
|
# Check requirements
|
2022-01-01 01:39:51 +13:00
|
|
|
#
|
2017-02-11 02:23:07 +01:00
|
|
|
###############################################################################
|
|
|
|
|
2017-02-11 03:43:01 +01:00
|
|
|
try:
    # The test image must have been built beforehand; abort the whole session
    # if it is missing rather than failing every test individually.
    docker_client.images.get('nginxproxy/nginx-proxy:test')
except docker.errors.ImageNotFound:
    pytest.exit("The docker image 'nginxproxy/nginx-proxy:test' is missing")

# The suite relies on docker-py v7+ API behavior.
if Version(docker.__version__) < Version("7.0.0"):
    pytest.exit("This test suite is meant to work with the python docker module v7.0.0 or later")
|