Refactor test_create, test_proxy, and test_env for test isolation (#2416)

* Deactivate PyCQA line-length warnings

* Proper os.environ restore in pytest

* Add --mamba-pkgs-dir for cache reuse in Pytest

* Set pytest tmp retention policy

* Allow Path in test helpers

* Refactor test_create for isolation

* Improve pytest fixtures

* Refactor test_env.py for test isolation

* Refactor test_proxy for test isolation
Antoine Prouvost 2023-03-29 19:30:38 +02:00 committed by GitHub
parent 1d7fb17468
commit 05f1b9b6b3
8 changed files with 989 additions and 1025 deletions

micromamba/setup.cfg (new file)

@@ -0,0 +1,2 @@
[pycodestyle]
ignore = E5,W1,W2,W3,W5

micromamba/tests/conftest.py

@@ -1,12 +1,32 @@
import copy
import os
import pathlib
import platform
from typing import Generator
from typing import Any, Generator, Mapping
import pytest
from . import helpers
####################
# Config options #
####################
def pytest_addoption(parser):
"""Add pkgs-dir command line argument to pytest."""
parser.addoption(
"--mamba-pkgs-dir",
action="store",
default=None,
help="Package cache to reuse between tests",
)
##################
# Test fixture #
##################
@pytest.fixture
def tmp_home(tmp_path: pathlib.Path) -> Generator[pathlib.Path, None, None]:
@@ -31,11 +51,15 @@ def tmp_home(tmp_path: pathlib.Path) -> Generator[pathlib.Path, None, None]:
@pytest.fixture(scope="session")
def tmp_pkgs_dirs(tmp_path_factory: pytest.TempPathFactory) -> pathlib.Path:
def tmp_pkgs_dirs(tmp_path_factory: pytest.TempPathFactory, request) -> pathlib.Path:
"""A common package cache for mamba downloads.
The directory is not used automatically when calling this fixture.
"""
if (p := request.config.getoption("--mamba-pkgs-dir")) is not None:
p = pathlib.Path(p)
p.mkdir(parents=True, exist_ok=True)
return p
return tmp_path_factory.mktemp("pkgs_dirs")
@@ -45,19 +69,29 @@ def shared_pkgs_dirs(request) -> bool:
return request.param
@pytest.fixture
def tmp_environ() -> Generator[Mapping[str, Any], None, None]:
"""Saves and restore environment variables.
This is used for tests that need to modify ``os.environ``.
"""
old_environ = copy.deepcopy(os.environ)
yield old_environ
os.environ.clear()
os.environ.update(old_environ)
@pytest.fixture
def tmp_clean_env(
tmp_pkgs_dirs: pathlib.Path, shared_pkgs_dirs: bool
tmp_pkgs_dirs: pathlib.Path, shared_pkgs_dirs: bool, tmp_environ: None
) -> Generator[None, None, None]:
"""Remove all Conda/Mamba activation artifacts from environment."""
saved_environ = {}
for k, v in os.environ.items():
if k.startswith(("CONDA", "_CONDA", "MAMBA", "_MAMBA")):
saved_environ[k] = v
del os.environ[k]
def keep_in_path(
p: str, prefix: str | None = saved_environ.get("CONDA_PREFIX")
p: str, prefix: str | None = tmp_environ.get("CONDA_PREFIX")
) -> bool:
if "condabin" in p:
return False
@@ -77,8 +111,6 @@ def tmp_clean_env(
yield None
os.environ.update(saved_environ)
@pytest.fixture(params=[helpers.random_string, "long_prefix_" * 20])
def tmp_env_name(request) -> str:
@@ -93,15 +125,11 @@ def tmp_root_prefix(
tmp_path: pathlib.Path, tmp_clean_env: None
) -> Generator[pathlib.Path, None, None]:
"""Change the micromamba root directory to a tmp folder for the duration of a test."""
old_root_prefix = os.environ.get("MAMBA_ROOT_PREFIX")
new_root_prefix = tmp_path / "mamba"
new_root_prefix.mkdir(parents=True, exist_ok=True)
os.environ["MAMBA_ROOT_PREFIX"] = str(new_root_prefix)
yield new_root_prefix
if old_root_prefix is not None:
os.environ["MAMBA_ROOT_PREFIX"] = old_root_prefix
else:
del os.environ["MAMBA_ROOT_PREFIX"]
# os.environ restored by tmp_clean_env and tmp_environ
@pytest.fixture
@@ -110,20 +138,12 @@ def tmp_empty_env(
) -> Generator[pathlib.Path, None, None]:
"""An empty environment created under a temporary root prefix."""
helpers.create("-n", tmp_env_name, no_dry_run=True)
yield tmp_root_prefix
yield tmp_root_prefix / "envs" / tmp_env_name
@pytest.fixture
def tmp_prefix(
tmp_root_prefix: pathlib.Path, tmp_env_name: str
) -> Generator[pathlib.Path, None, None]:
def tmp_prefix(tmp_empty_env: pathlib.Path) -> Generator[pathlib.Path, None, None]:
"""Change the conda prefix to a tmp folder for the duration of a test."""
old_prefix = os.environ.get("CONDA_PREFIX")
new_prefix = tmp_root_prefix / "envs" / tmp_env_name
new_prefix.mkdir(parents=True, exist_ok=True)
os.environ["CONDA_PREFIX"] = str(new_prefix)
yield new_prefix
if old_prefix is not None:
os.environ["CONDA_PREFIX"] = old_prefix
else:
del os.environ["CONDA_PREFIX"]
os.environ["CONDA_PREFIX"] = str(tmp_empty_env)
yield tmp_empty_env
# os.environ restored by tmp_environ through tmp_root_prefix
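For orientation (not part of the diff): a minimal sketch of how a test module might consume the reworked fixtures, assuming the conftest.py above is on the collection path. The test names, bodies, and the MY_TEST_VAR variable are illustrative only; a persistent package cache can be supplied on the command line, e.g. pytest --mamba-pkgs-dir=/path/to/cache.

# Illustrative only: hypothetical tests relying on the fixtures defined above.
import os
import pathlib


def test_modifies_environ(tmp_environ) -> None:
    # tmp_environ snapshots os.environ and restores it on teardown,
    # so this mutation does not leak into other tests.
    os.environ["MY_TEST_VAR"] = "42"


def test_isolated_prefix(tmp_home: pathlib.Path, tmp_prefix: pathlib.Path) -> None:
    # tmp_prefix points CONDA_PREFIX at an empty environment created under a
    # temporary MAMBA_ROOT_PREFIX (see tmp_root_prefix / tmp_empty_env above),
    # while tmp_home isolates the user home directory.
    assert os.environ["CONDA_PREFIX"] == str(tmp_prefix)
    assert pathlib.Path(os.environ["MAMBA_ROOT_PREFIX"]) in tmp_prefix.parents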

micromamba/tests/helpers.py

@@ -166,7 +166,7 @@ def create(
create_cmd="create",
):
umamba = get_umamba()
cmd = [umamba] + create_cmd.split() + [arg for arg in args if arg]
cmd = [umamba] + create_cmd.split() + [str(arg) for arg in args if arg]
if "--print-config-only" in args:
cmd += ["--debug"]
@@ -270,7 +270,7 @@ def update(*args, default_channel=True, no_rc=True, no_dry_run=False):
def run_env(*args, f=None):
umamba = get_umamba()
cmd = [umamba, "env"] + [arg for arg in args if arg]
cmd = [umamba, "env"] + [str(arg) for arg in args if arg]
res = subprocess_run(*cmd)
@@ -284,7 +284,7 @@ def run_env(*args, f=None):
def umamba_list(*args):
umamba = get_umamba()
cmd = [umamba, "list"] + [arg for arg in args if arg]
cmd = [umamba, "list"] + [str(arg) for arg in args if arg]
res = subprocess_run(*cmd)
if "--json" in args:
@@ -297,7 +297,7 @@ def umamba_list(*args):
def umamba_run(*args, **kwargs):
umamba = get_umamba()
cmd = [umamba, "run"] + [arg for arg in args if arg]
cmd = [umamba, "run"] + [str(arg) for arg in args if arg]
res = subprocess_run(*cmd, **kwargs)
if "--json" in args:
@ -310,7 +310,7 @@ def umamba_run(*args, **kwargs):
def umamba_repoquery(*args, no_rc=True):
umamba = get_umamba()
cmd = [umamba, "repoquery"] + [arg for arg in args if arg]
cmd = [umamba, "repoquery"] + [str(arg) for arg in args if arg]
if no_rc:
cmd += ["--no-rc"]
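A hypothetical call site (not in the diff) showing what the str(arg) conversions above enable: the helpers can now be handed pathlib.Path objects directly instead of pre-converted strings. The environment name "my-env" and the test name are made up; env-create-export.yaml is the spec file already used by test_env.py below.

# Illustrative only: passing pathlib.Path objects straight to the helpers.
import pathlib

from . import helpers

__this_dir__ = pathlib.Path(__file__).parent.resolve()


def test_create_from_path_spec(tmp_home, tmp_root_prefix) -> None:
    spec_file = __this_dir__ / "env-create-export.yaml"  # a Path, not a str
    helpers.create("-n", "my-env", "-f", spec_file, "--json", no_dry_run=True)
    helpers.run_env("export", "-n", "my-env")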

micromamba/tests/test_create.py (file diff suppressed because it is too large)

micromamba/tests/test_env.py

@@ -1,152 +1,103 @@
import os
import platform
import shutil
from pathlib import Path
import pytest
import yaml
from .helpers import *
from . import helpers
__this_dir__ = Path(__file__).parent.resolve()
class TestEnv:
def test_env_list(tmp_home, tmp_root_prefix, tmp_empty_env):
env_json = helpers.run_env("list", "--json")
current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
current_prefix = os.environ["CONDA_PREFIX"]
cache = os.path.join(current_root_prefix, "pkgs")
assert "envs" in env_json
assert len(env_json["envs"]) >= 2
assert str(tmp_root_prefix) in env_json["envs"]
assert str(tmp_empty_env) in env_json["envs"]
env_name_1 = random_string()
env_name_2 = random_string()
env_name_3 = random_string()
root_prefix = Path(os.path.join("~", "tmproot" + random_string())).expanduser()
@classmethod
def setup_class(cls):
os.environ["MAMBA_ROOT_PREFIX"] = str(cls.root_prefix)
os.environ["CONDA_PREFIX"] = str(cls.root_prefix)
def test_env_list_table(tmp_home, tmp_root_prefix, tmp_prefix):
res = helpers.run_env("list")
# speed-up the tests
os.environ["CONDA_PKGS_DIRS"] = cls.cache
assert "Name" in res
assert "base" in res
assert str(tmp_root_prefix) in res
all_lines = res.splitlines()
print("\n".join(all_lines))
for line in all_lines:
if "*" in line:
active_env_l = line
assert str(tmp_root_prefix) in active_env_l
res = create(
f"",
"-n",
cls.env_name_1,
"--json",
no_dry_run=True,
)
os.environ["CONDA_PREFIX"] = str(tmp_prefix)
@classmethod
def setup(cls):
pass
res = helpers.run_env("list")
@classmethod
def teardown_class(cls):
# Unregister / remove all test envs
run_env("remove", "-n", cls.env_name_1, "-y")
run_env("remove", "-n", cls.env_name_3, "-y")
run_env("remove", "-n", "env-create-export", "-y")
all_lines = res.splitlines()
for line in all_lines:
if "*" in line:
active_env_l = line
assert str(tmp_prefix) in active_env_l
os.environ["MAMBA_ROOT_PREFIX"] = cls.current_root_prefix
os.environ["CONDA_PREFIX"] = cls.current_prefix
os.environ.pop("CONDA_PKGS_DIRS")
shutil.rmtree(cls.root_prefix)
def test_env_list(self):
env_json = run_env("list", "--json")
env_1_fp = str(self.root_prefix / "envs" / self.env_name_1)
def test_register_new_env(tmp_home, tmp_root_prefix):
helpers.create("-n", "env2", "--json", no_dry_run=True)
helpers.create("-n", "env3", "--json", no_dry_run=True)
assert "envs" in env_json
assert len(env_json["envs"]) >= 2
assert str(self.root_prefix) in env_json["envs"]
assert env_1_fp in env_json["envs"]
env_json = helpers.run_env("list", "--json")
env_2_fp = tmp_root_prefix / "envs" / "env2"
env_3_fp = tmp_root_prefix / "envs" / "env3"
assert str(env_2_fp) in env_json["envs"]
assert str(env_3_fp) in env_json["envs"]
def test_env_list_table(self):
res = run_env("list")
shutil.rmtree(env_2_fp)
env_json = helpers.run_env("list", "--json")
assert str(env_2_fp) not in env_json["envs"]
assert str(env_3_fp) in env_json["envs"]
assert "Name" in res
assert "base" in res
assert str(self.root_prefix) in res
lines = res.splitlines()
for l in lines:
if "*" in l:
active_env_l = l
assert str(self.root_prefix) in active_env_l
full_env = self.root_prefix / "envs" / self.env_name_1
os.environ["CONDA_PREFIX"] = str(full_env)
def test_env_export(tmp_home, tmp_root_prefix):
env_name = "env-create-export"
spec_file = __this_dir__ / "env-create-export.yaml"
helpers.create("-n", env_name, "-f", spec_file)
ret = yaml.safe_load(helpers.run_env("export", "-n", env_name))
assert ret["name"] == env_name
assert set(ret["channels"]) == {"conda-forge"}
assert "micromamba=0.24.0=0" in ret["dependencies"]
res = run_env("list")
lines = res.splitlines()
for l in lines:
if "*" in l:
active_env_l = l
assert str(full_env) in active_env_l
def test_create():
"""Tests for ``micromamba env create`` can be found in ``test_create.py``.
os.environ["CONDA_PREFIX"] = str(self.root_prefix)
Look for 'create_cmd'.
"""
pass
def test_register_new_env(self):
res = create(
f"",
"-n",
self.env_name_2,
"--json",
no_dry_run=True,
)
res = create(
f"",
"-n",
self.env_name_3,
"--json",
no_dry_run=True,
)
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
def test_env_remove(tmp_home, tmp_root_prefix):
env_name = "env-create-remove"
env_fp = tmp_root_prefix / "envs" / env_name
conda_env_file = tmp_home / ".conda/environments.txt"
env_json = run_env("list", "--json")
env_2_fp = str(self.root_prefix / "envs" / self.env_name_2)
env_3_fp = str(self.root_prefix / "envs" / self.env_name_3)
assert str(env_2_fp) in env_json["envs"]
assert str(env_3_fp) in env_json["envs"]
# Create env with xtensor
helpers.create("xtensor", "-n", env_name, "--json", no_dry_run=True)
shutil.rmtree(env_2_fp)
env_json = run_env("list", "--json")
assert env_2_fp not in env_json["envs"]
assert env_3_fp in env_json["envs"]
env_json = helpers.run_env("list", "--json")
assert str(env_fp) in env_json["envs"]
assert env_fp.exists()
with open(conda_env_file, "r", encoding="utf-8") as f:
lines = [line.strip() for line in f]
assert str(env_fp) in lines
def test_env_export(self):
env_name = "env-create-export"
spec_file = Path(__file__).parent / "env-create-export.yaml"
create("", "-n", env_name, "-f", spec_file)
ret = yaml.safe_load(run_env("export", "-n", env_name))
assert ret["name"] == env_name
assert set(ret["channels"]) == {"conda-forge"}
assert "micromamba=0.24.0=0" in ret["dependencies"]
def test_create(self):
# Tests for 'micromamba env create' can be found in 'test_create.py' (look for 'create_cmd')
pass
def test_env_remove(self):
env_name = "env-create-remove"
env_fp = str(self.root_prefix / "envs" / env_name)
conda_env_file = Path(os.path.join("~", ".conda/environments.txt")).expanduser()
# Create env with xtensor
res = create("xtensor", "-n", env_name, "--json", no_dry_run=True)
env_json = run_env("list", "--json")
assert env_fp in env_json["envs"]
assert Path(env_fp).expanduser().exists()
with open(conda_env_file, "r", encoding="utf-8") as f:
lines = [line.strip() for line in f]
assert env_fp in lines
# Unregister / remove env_name
run_env("remove", "-n", env_name, "-y")
env_json = run_env("list", "--json")
assert env_fp not in env_json["envs"]
assert not Path(env_fp).expanduser().exists()
with open(conda_env_file, "r", encoding="utf-8") as f:
lines = [line.strip() for line in f]
assert env_fp not in lines
# Unregister / remove env_name
helpers.run_env("remove", "-n", env_name, "-y")
env_json = helpers.run_env("list", "--json")
assert str(env_fp) not in env_json["envs"]
assert not env_fp.exists()
with open(conda_env_file, "r", encoding="utf-8") as f:
lines = [line.strip() for line in f]
assert str(env_fp) not in lines

micromamba/tests/test_proxy.py

@@ -1,137 +1,117 @@
import asyncio
import os
import shutil
import subprocess
import time
import urllib.parse
from pathlib import Path
from subprocess import TimeoutExpired
from .helpers import *
import pytest
from . import helpers
__this_dir__ = Path(__file__).parent.resolve()
class TestProxy:
@pytest.fixture
def mitmdump_exe():
"""Get the path to the ``mitmdump`` executable.
current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
current_prefix = os.environ["CONDA_PREFIX"]
If the executable is provided in a conda environment, this fixture needs to be called
before ``tmp_root_prefix`` and the like, as they will clean the ``PATH``.
"""
return Path(shutil.which("mitmdump")).resolve()
env_name = random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
prefix = os.path.join(root_prefix, "envs", env_name)
mitm_exe = shutil.which("mitmdump")
mitm_confdir = os.path.join(root_prefix, "mitmproxy")
mitm_dump_path = os.path.join(root_prefix, "dump.json")
proxy_process = None
@classmethod
def setup_class(cls):
os.environ["MAMBA_ROOT_PREFIX"] = TestProxy.root_prefix
os.environ["CONDA_PREFIX"] = TestProxy.prefix
def setup_method(self):
create("-n", TestProxy.env_name, "--offline", no_dry_run=True)
@classmethod
def teardown_class(cls):
os.environ["MAMBA_ROOT_PREFIX"] = TestProxy.current_root_prefix
os.environ["CONDA_PREFIX"] = TestProxy.current_prefix
def teardown_method(self):
shutil.rmtree(TestProxy.root_prefix)
class MitmProxy:
def __init__(self, exe: Path, conf: Path, dump: Path):
self.exe = Path(exe).resolve()
self.conf = Path(conf).resolve()
self.dump = Path(dump).resolve()
self.process = None
def start_proxy(self, port, options=[]):
assert self.proxy_process is None
script = Path(__file__).parent / "dump_proxy_connections.py"
self.proxy_process = subprocess.Popen(
assert self.process is None
self.process = subprocess.Popen(
[
TestProxy.mitm_exe,
self.exe,
"--listen-port",
str(port),
"--scripts",
script,
str(__this_dir__ / "dump_proxy_connections.py"),
"--set",
f"outfile={TestProxy.mitm_dump_path}",
f"outfile={self.dump}",
"--set",
f"confdir={TestProxy.mitm_confdir}",
f"confdir={self.conf}",
*options,
]
)
# Wait until mitmproxy has generated its certificate or some tests might fail
while not (Path(TestProxy.mitm_confdir) / "mitmproxy-ca-cert.pem").exists():
while not (Path(self.conf) / "mitmproxy-ca-cert.pem").exists():
time.sleep(1)
def stop_proxy(self):
self.proxy_process.terminate()
self.process.terminate()
try:
self.proxy_process.wait(3)
except TimeoutExpired:
self.proxy_process.kill()
self.proxy_process = None
self.process.wait(3)
except subprocess.TimeoutExpired:
self.process.kill()
self.process = None
@pytest.mark.parametrize(
"auth",
[
None,
"foo:bar",
"user%40example.com:pass",
],
@pytest.mark.parametrize("auth", [None, "foo:bar", "user%40example.com:pass"])
@pytest.mark.parametrize("ssl_verify", (True, False))
def test_proxy_install(
mitmdump_exe, tmp_home, tmp_prefix, tmp_path, unused_tcp_port, auth, ssl_verify
):
"""
This test makes sure micromamba follows the proxy settings in ``.condarc``.
It starts mitmproxy with the `dump_proxy_connections.py` script, which dumps all requested URLs to a text file.
After that, micromamba is used to install a package while pointing it at that mitmproxy instance. Once
micromamba has finished, the proxy server is stopped and the URLs micromamba requested are compared to the URLs
mitmproxy intercepted, making sure that all the requests went through the proxy.
"""
if auth is not None:
proxy_options = ["--proxyauth", urllib.parse.unquote(auth)]
proxy_url = "http://{}@localhost:{}".format(auth, unused_tcp_port)
else:
proxy_options = []
proxy_url = "http://localhost:{}".format(unused_tcp_port)
proxy = MitmProxy(
exe=mitmdump_exe,
conf=(tmp_path / "mitmproxy-conf"),
dump=(tmp_path / "mitmproxy-dump"),
)
@pytest.mark.parametrize("ssl_verify", (True, False))
def test_install(self, unused_tcp_port, auth, ssl_verify):
"""
This test makes sure micromamba follows the proxy settings in .condarc
proxy.start_proxy(unused_tcp_port, proxy_options)
It starts mitmproxy with the `dump_proxy_connections.py` script, which dumps all requested urls in a text file.
After that micromamba is used to install a package, while pointing it to that mitmproxy instance. Once
micromamba finished the proxy server is stopped and the urls micromamba requested are compared to the urls
mitmproxy intercepted, making sure that all the requests went through the proxy.
"""
rc_file = tmp_prefix / "rc.yaml"
verify_string = proxy.conf / "mitmproxy-ca-cert.pem" if ssl_verify else "false"
if auth is not None:
proxy_options = ["--proxyauth", urllib.parse.unquote(auth)]
proxy_url = "http://{}@localhost:{}".format(auth, unused_tcp_port)
else:
proxy_options = []
proxy_url = "http://localhost:{}".format(unused_tcp_port)
file_content = [
"proxy_servers:",
" http: {}".format(proxy_url),
" https: {}".format(proxy_url),
"ssl_verify: {}".format(verify_string),
]
with open(rc_file, "w") as f:
f.write("\n".join(file_content))
self.start_proxy(unused_tcp_port, proxy_options)
cmd = ["xtensor", "--rc-file", rc_file]
if os.name == "nt":
# The certificates generated by mitmproxy don't support revocation.
# The schannel backend curl uses on Windows fails revocation check if revocation isn't supported. Other
# backends succeed revocation check in that case.
cmd += ["--ssl-no-revoke"]
cmd = ["xtensor"]
f_name = random_string() + ".yaml"
rc_file = os.path.join(TestProxy.prefix, f_name)
res = helpers.install(*cmd, "--json", no_rc=False)
if ssl_verify:
verify_string = os.path.abspath(
os.path.join(TestProxy.mitm_confdir, "mitmproxy-ca-cert.pem")
)
else:
verify_string = "false"
proxy.stop_proxy()
file_content = [
"proxy_servers:",
" http: {}".format(proxy_url),
" https: {}".format(proxy_url),
"ssl_verify: {}".format(verify_string),
]
with open(rc_file, "w") as f:
f.write("\n".join(file_content))
with open(proxy.dump, "r") as f:
proxied_requests = f.read().splitlines()
cmd += ["--rc-file", rc_file]
if os.name == "nt":
# The certificates generated by mitmproxy don't support revocation.
# The schannel backend curl uses on Windows fails revocation check if revocation isn't supported. Other
# backends succeed revocation check in that case.
cmd += ["--ssl-no-revoke"]
res = install(*cmd, "--json", no_rc=False)
self.stop_proxy()
with open(TestProxy.mitm_dump_path, "r") as f:
proxied_requests = f.read().splitlines()
for fetch in res["actions"]["FETCH"]:
assert fetch["url"] in proxied_requests
for fetch in res["actions"]["FETCH"]:
assert fetch["url"] in proxied_requests
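The dump_proxy_connections.py addon itself is not shown in this diff. For context, a mitmproxy addon honouring the same contract (a custom outfile option that receives one requested URL per line) could look roughly like the sketch below; the repository's actual script may differ.

# Illustrative sketch only, not the repository's dump_proxy_connections.py.
from mitmproxy import ctx, http


def load(loader) -> None:
    # Registers the option consumed via "--set outfile=..." in MitmProxy.start_proxy.
    loader.add_option("outfile", str, "", "File receiving one requested URL per line")


def request(flow: http.HTTPFlow) -> None:
    # Append every URL going through the proxy; the test later compares these
    # against the URLs listed under res["actions"]["FETCH"] in micromamba's JSON output.
    with open(ctx.options.outfile, "a") as dump:
        dump.write(flow.request.url + "\n")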

micromamba/tests/test_remove.py

@@ -12,6 +12,8 @@ import pytest
from .helpers import *
__this_dir__ = Path(__file__).parent.resolve()
@pytest.mark.skipif(dry_run_tests == DryRun.ULTRA_DRY, reason="Running ultra dry tests")
class TestRemove:
@@ -185,7 +187,6 @@ class TestRemove:
class TestRemoveConfig:
current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
current_prefix = os.environ["CONDA_PREFIX"]
@@ -243,12 +244,9 @@ class TestRemoveConfig:
assert res["specs"] == specs
def test_remove_then_clean(self, env_created):
from .test_create import test_env_requires_pip_install_path
env_file = __this_dir__ / "env-requires-pip-install.yaml"
env_name = "env_to_clean"
create(
"-n", env_name, "-f", test_env_requires_pip_install_path, no_dry_run=True
)
create("-n", env_name, "-f", env_file, no_dry_run=True)
remove("-n", env_name, "pip", no_dry_run=True)
clean("-ay", no_dry_run=True)

pyproject.toml

@@ -4,3 +4,7 @@ requires = [
'setuptools >= 49.2.1',
'pybind11 >= 2.2',
]
[tool.pytest.ini_options]
minversion = "6.0"
tmp_path_retention_policy = "failed"