Micromamba tests improvements (#2517)

* Automatically restore os.environ in umamba tests

* Cleanup test_info

* Refactor conftest.py

* Eagerly clean micromamba test dir

* Refactor micromamba test_info with new fixtures

* Fix test self_update on Win
Antoine Prouvost 2023-05-26 18:27:54 +02:00 committed by GitHub
parent fb3c4c79d4
commit ecb5a7edc5
5 changed files with 459 additions and 496 deletions
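
The first bullet refers to the `tmp_environ` fixture in conftest.py below becoming `autouse=True`, so every test now gets its environment variables snapshotted and restored without opting in. The fixture's full body is not visible in this view; a minimal sketch of the pattern, assuming it simply copies `os.environ` before the test and restores it afterwards, could look like:

    import os
    from typing import Any, Generator, Mapping

    import pytest


    @pytest.fixture(autouse=True)
    def tmp_environ() -> Generator[Mapping[str, Any], None, None]:
        """Save os.environ before the test and restore it afterwards."""
        old_environ = dict(os.environ)  # snapshot before the test runs
        yield old_environ
        # Restore modified or deleted variables, then drop added ones.
        os.environ.update(old_environ)
        for key in set(os.environ) - set(old_environ):
            del os.environ[key]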

conftest.py

@@ -14,12 +14,24 @@ from . import helpers
 def pytest_addoption(parser):
-    """Add pkgs-dir command line argument to pytest."""
+    """Add command line argument to pytest."""
     parser.addoption(
         "--mamba-pkgs-dir",
         action="store",
         default=None,
-        help="Package cache to resuse between tests",
+        help="Package cache to reuse between tests",
+    )
+    parser.addoption(
+        "--no-eager-clean",
+        action="store_true",
+        default=False,
+        help=(
+            "Do not eagerly delete temporary folders such as HOME and MAMBA_ROOT_PREFIX"
+            " created during tests."
+            " These folders take a lot of disk space so we delete them eagerly."
+            " For debugging, it can be convenient to keep them."
+            " With this option, cleaning will fall back on the default pytest policy."
+        ),
     )
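
The two options registered above complement each other: a persistent package cache avoids re-downloading packages between runs, while --no-eager-clean keeps the temporary HOME and root-prefix folders around for inspection. A hypothetical invocation combining them (the test path and cache path here are placeholders, not from the commit):

    import pytest

    # Reuse one package cache across runs and keep tmp folders for debugging.
    pytest.main(
        [
            "micromamba/tests",
            "--mamba-pkgs-dir=/tmp/mamba-pkgs",
            "--no-eager-clean",
        ]
    )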
@@ -28,48 +40,7 @@ def pytest_addoption(parser):
 ##################
-@pytest.fixture
-def tmp_home(tmp_path: pathlib.Path) -> Generator[pathlib.Path, None, None]:
-    """Change the home directory to a tmp folder for the duration of a test."""
-    # Try multiple combination for Unix/Windows
-    home_envs = ["HOME", "USERPROFILE"]
-    old_homes = {name: os.environ.get(name) for name in home_envs}
-
-    if len(home_envs) > 0:
-        new_home = tmp_path / "home"
-        new_home.mkdir(parents=True, exist_ok=True)
-        for env in home_envs:
-            os.environ[env] = str(new_home)
-        yield new_home
-        for env, home in old_homes.items():
-            if old_homes[env] is None:
-                del os.environ[env]
-            else:
-                os.environ[env] = home
-    else:
-        yield pathlib.Path.home()
-
-
-@pytest.fixture(scope="session")
-def tmp_pkgs_dirs(tmp_path_factory: pytest.TempPathFactory, request) -> pathlib.Path:
-    """A common package cache for mamba downloads.
-
-    The directory is not used automatically when calling this fixture.
-    """
-    if (p := request.config.getoption("--mamba-pkgs-dir")) is not None:
-        p = pathlib.Path(p)
-        p.mkdir(parents=True, exist_ok=True)
-        return p
-    return tmp_path_factory.mktemp("pkgs_dirs")
-
-
-@pytest.fixture(params=[False])
-def shared_pkgs_dirs(request) -> bool:
-    """A dummy fixture to control the use of shared package dir."""
-    return request.param
-
-
-@pytest.fixture
+@pytest.fixture(autouse=True)
 def tmp_environ() -> Generator[Mapping[str, Any], None, None]:
     """Saves and restore environment variables.
@@ -82,9 +53,31 @@ def tmp_environ() -> Generator[Mapping[str, Any], None, None]:
 @pytest.fixture
-def tmp_clean_env(
-    tmp_pkgs_dirs: pathlib.Path, shared_pkgs_dirs: bool, tmp_environ: None
-) -> Generator[None, None, None]:
+def tmp_home(
+    request, tmp_environ, tmp_path_factory: pytest.TempPathFactory
+) -> Generator[pathlib.Path, None, None]:
+    """Change the home directory to a tmp folder for the duration of a test."""
+    # Try multiple combinations for Unix/Windows
+    home_envs = ["HOME", "USERPROFILE"]
+    used_homes = [env for env in home_envs if env in os.environ]
+
+    new_home = pathlib.Path.home()
+    if len(used_homes) > 0:
+        new_home = tmp_path_factory.mktemp("home")
+        new_home.mkdir(parents=True, exist_ok=True)
+        for env in used_homes:
+            os.environ[env] = str(new_home)
+
+    yield new_home
+
+    # Pytest would clean it automatically but this can be large (0.5 GB for repodata)
+    # We clean it explicitly
+    if not request.config.getoption("--no-eager-clean"):
+        helpers.rmtree(new_home)
+
+
+@pytest.fixture
+def tmp_clean_env(tmp_environ: None) -> None:
     """Remove all Conda/Mamba activation artifacts from environment."""
     for k, v in os.environ.items():
         if k.startswith(("CONDA", "_CONDA", "MAMBA", "_MAMBA")):
@@ -105,11 +98,52 @@ def tmp_clean_env(tmp_environ: None) -> None:
     path_list = os.environ["PATH"].split(os.pathsep)
     path_list = [p for p in path_list if keep_in_path(p)]
     os.environ["PATH"] = os.pathsep.join(path_list)
+    # os.environ restored by tmp_clean_env and tmp_environ
+
+
+@pytest.fixture(scope="session")
+def tmp_pkgs_dirs(tmp_path_factory: pytest.TempPathFactory, request) -> pathlib.Path:
+    """A common package cache for mamba downloads.
+
+    The directory is not used automatically when calling this fixture.
+    """
+    if (p := request.config.getoption("--mamba-pkgs-dir")) is not None:
+        p = pathlib.Path(p)
+        p.mkdir(parents=True, exist_ok=True)
+        return p
+    return tmp_path_factory.mktemp("pkgs_dirs")
+
+
+@pytest.fixture(params=[False])
+def shared_pkgs_dirs(request) -> bool:
+    """A dummy fixture to control the use of shared package dir."""
+    return request.param
+
+
+@pytest.fixture
+def tmp_root_prefix(
+    request,
+    tmp_path_factory: pytest.TempPathFactory,
+    tmp_clean_env: None,
+    tmp_pkgs_dirs: pathlib.Path,
+    shared_pkgs_dirs: bool,
+) -> Generator[pathlib.Path, None, None]:
+    """Change the micromamba root directory to a tmp folder for the duration of a test."""
+    new_root_prefix = tmp_path_factory.mktemp("mamba")
+    new_root_prefix.mkdir(parents=True, exist_ok=True)
+    os.environ["MAMBA_ROOT_PREFIX"] = str(new_root_prefix)
+
     if shared_pkgs_dirs:
         os.environ["CONDA_PKGS_DIRS"] = str(tmp_pkgs_dirs)
 
-    yield None
+    yield new_root_prefix
+
+    # Pytest would clean it automatically but this can be large (0.5 GB for repodata)
+    # We clean it explicitly
+    if not request.config.getoption("--no-eager-clean"):
+        helpers.rmtree(new_root_prefix)
     # os.environ restored by tmp_clean_env and tmp_environ
 
 
 @pytest.fixture(params=[helpers.random_string, "long_prefix_" * 20])
@@ -120,23 +154,11 @@ def tmp_env_name(request) -> str:
     return request.param
 
 
-@pytest.fixture
-def tmp_root_prefix(
-    tmp_path: pathlib.Path, tmp_clean_env: None
-) -> Generator[pathlib.Path, None, None]:
-    """Change the micromamba root directory to a tmp folder for the duration of a test."""
-    new_root_prefix = tmp_path / "mamba"
-    new_root_prefix.mkdir(parents=True, exist_ok=True)
-    os.environ["MAMBA_ROOT_PREFIX"] = str(new_root_prefix)
-    yield new_root_prefix
-    # os.environ restored by tmp_clean_env and tmp_environ
-
-
 @pytest.fixture
 def tmp_empty_env(
     tmp_root_prefix: pathlib.Path, tmp_env_name: str
 ) -> Generator[pathlib.Path, None, None]:
-    """An empty envirnment created under a temporary root prefix."""
+    """An empty environment created under a temporary root prefix."""
     helpers.create("-n", tmp_env_name, no_dry_run=True)
     yield tmp_root_prefix / "envs" / tmp_env_name
@@ -147,3 +169,10 @@ def tmp_prefix(tmp_empty_env: pathlib.Path) -> Generator[pathlib.Path, None, None]:
     os.environ["CONDA_PREFIX"] = str(tmp_empty_env)
     yield tmp_empty_env
     # os.environ restored by tmp_environ through tmp_root_prefix
+
+
+@pytest.fixture
+def tmp_xtensor_env(tmp_prefix: pathlib.Path) -> Generator[pathlib.Path, None, None]:
+    """An activated environment with Xtensor installed."""
+    helpers.install("-c", "conda-forge", "--json", "xtensor", no_dry_run=True)
+    yield tmp_prefix
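
Taken together, the conftest.py changes give each test isolation through fixture chaining: `tmp_root_prefix` depends on `tmp_clean_env`, which depends on the autouse `tmp_environ`, so requesting one fixture pulls in the whole setup and teardown. A hypothetical test, not part of this commit, showing what requesting the fixtures buys:

    import os


    def test_isolation(tmp_home, tmp_root_prefix):
        # tmp_root_prefix exported MAMBA_ROOT_PREFIX pointing at a throw-away folder.
        assert os.environ["MAMBA_ROOT_PREFIX"] == str(tmp_root_prefix)
        # tmp_home re-pointed whichever of HOME/USERPROFILE was set (assuming one was).
        assert str(tmp_home) in {os.environ.get("HOME"), os.environ.get("USERPROFILE")}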

helpers.py

@@ -52,12 +52,16 @@ MAMBA_NOT_ALLOW_MISSING_PREFIX = 0
 MAMBA_NOT_ALLOW_NOT_ENV_PREFIX = 0
 MAMBA_NOT_EXPECT_EXISTING_PREFIX = 0
 
-if platform.system() == "Windows":
-    xtensor_hpp = "Library/include/xtensor/xtensor.hpp"
-    xsimd_hpp = "Library/include/xsimd/xsimd.hpp"
-else:
-    xtensor_hpp = "include/xtensor/xtensor.hpp"
-    xsimd_hpp = "include/xsimd/xsimd.hpp"
+
+def lib_prefix() -> Path:
+    """A potential prefix used for libraries in Conda environments."""
+    if platform.system() == "Windows":
+        return Path("Library")
+    return Path("")
+
+
+xtensor_hpp = lib_prefix() / "include/xtensor/xtensor.hpp"
+xsimd_hpp = lib_prefix() / "include/xsimd/xsimd.hpp"
 
 
 def get_umamba(cwd=os.getcwd()):

test_activation.py

@@ -960,7 +960,7 @@ class TestActivation:
         else:
             print(mamba_exe)
         extra_start_code = [
-            "source ~/.bash_profile",
+            f"source {PurePosixPath(tmp_home)}/.bash_profile",  # HOME from os.environ not acknowledged
             "micromamba info",
             "echo $MAMBA_ROOT_PREFIX",
             "echo $HOME",

test_info.py

@@ -1,111 +1,77 @@
-import json
 import os
-import shutil
-import subprocess
-from pathlib import Path
 
 import pytest
 
-from .helpers import create, get_env, info, random_string
-
-
-class TestInfo:
-    current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
-    current_prefix = os.environ["CONDA_PREFIX"]
-    cache = os.path.join(current_root_prefix, "pkgs")
-
-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
-    prefix = os.path.join(root_prefix, "envs", env_name)
-    user_config = os.path.expanduser(os.path.join("~", ".mambarc"))
-
-    @classmethod
-    def setup_class(cls):
-        os.environ["MAMBA_ROOT_PREFIX"] = TestInfo.root_prefix
-        os.environ["CONDA_PREFIX"] = TestInfo.prefix
-        # speed-up the tests
-        os.environ["CONDA_PKGS_DIRS"] = TestInfo.cache
-
-        os.makedirs(TestInfo.root_prefix, exist_ok=False)
-        create("-n", TestInfo.env_name, "--offline", no_dry_run=True)
-
-    @classmethod
-    def teardown_class(cls):
-        os.environ["MAMBA_ROOT_PREFIX"] = TestInfo.current_root_prefix
-        os.environ["CONDA_PREFIX"] = TestInfo.current_prefix
-        shutil.rmtree(TestInfo.root_prefix)
-
-    @classmethod
-    def teardown_method(cls):
-        os.environ["CONDA_PREFIX"] = TestInfo.prefix
-
-    @pytest.mark.parametrize("prefix_selection", [None, "prefix", "name"])
-    def test_base(self, prefix_selection):
-        os.environ["CONDA_PREFIX"] = TestInfo.root_prefix
-        if prefix_selection == "prefix":
-            infos = info("-p", TestInfo.root_prefix)
-        elif prefix_selection == "name":
-            infos = info("-n", "base")
-        else:
-            infos = info()
-
-        assert f"environment : base (active)" in infos
-        assert f"env location : {TestInfo.root_prefix}" in infos
-        assert f"user config files : {TestInfo.user_config}" in infos
-        assert f"base environment : {TestInfo.root_prefix}" in infos
-
-    @pytest.mark.parametrize("prefix_selection", [None, "prefix", "name"])
-    def test_env(self, prefix_selection):
-        if prefix_selection == "prefix":
-            infos = info("-p", TestInfo.prefix)
-        elif prefix_selection == "name":
-            infos = info("-n", TestInfo.env_name)
-        else:
-            infos = info()
-
-        assert f"environment : {TestInfo.env_name} (active)" in infos
-        assert f"env location : {TestInfo.prefix}" in infos
-        assert f"user config files : {TestInfo.user_config}" in infos
-        assert f"base environment : {TestInfo.root_prefix}" in infos
-
-    @pytest.mark.parametrize("existing_prefix", [False, True])
-    @pytest.mark.parametrize("prefix_selection", [None, "env_var", "prefix", "name"])
-    def test_not_env(self, prefix_selection, existing_prefix):
-        name = random_string()
-        prefix = os.path.join(TestInfo.root_prefix, "envs", name)
-
-        if existing_prefix:
-            os.makedirs(prefix, exist_ok=False)
-
-        if prefix_selection == "prefix":
-            infos = info("-p", prefix)
-        elif prefix_selection == "name":
-            infos = info("-n", name)
-        elif prefix_selection == "env_var":
-            os.environ["CONDA_PREFIX"] = prefix
-            infos = info()
-        else:
-            os.environ.pop("CONDA_PREFIX")
-            infos = info()
-        print(infos)
-
-        if prefix_selection is None:
-            expected_name = "None"
-            location = "-"
-        elif prefix_selection == "env_var":
-            expected_name = name + " (active)"
-            location = prefix
-        else:
-            if existing_prefix:
-                expected_name = name + " (not env)"
-            else:
-                expected_name = name + " (not found)"
-            location = prefix
-
-        assert f"environment : {expected_name}" in infos
-        assert f"env location : {location}" in infos
-        assert f"user config files : {TestInfo.user_config}" in infos
-        assert f"base environment : {TestInfo.root_prefix}" in infos
+from . import helpers
+
+
+@pytest.mark.parametrize("prefix_selection", [None, "prefix", "name"])
+def test_base(tmp_home, tmp_root_prefix, prefix_selection):
+    os.environ["CONDA_PREFIX"] = str(tmp_root_prefix)
+
+    if prefix_selection == "prefix":
+        infos = helpers.info("-p", tmp_root_prefix)
+    elif prefix_selection == "name":
+        infos = helpers.info("-n", "base")
+    else:
+        infos = helpers.info()
+
+    assert "environment : base (active)" in infos
+    assert f"env location : {tmp_root_prefix}" in infos
+    assert f"user config files : {tmp_home / '.mambarc'}" in infos
+    assert f"base environment : {tmp_root_prefix}" in infos
+
+
+@pytest.mark.parametrize("prefix_selection", [None, "prefix", "name"])
+def test_env(tmp_home, tmp_root_prefix, tmp_env_name, tmp_prefix, prefix_selection):
+    if prefix_selection == "prefix":
+        infos = helpers.info("-p", tmp_prefix)
+    elif prefix_selection == "name":
+        infos = helpers.info("-n", tmp_env_name)
+    else:
+        infos = helpers.info()
+
+    assert f"environment : {tmp_env_name} (active)" in infos
+    assert f"env location : {tmp_prefix}" in infos
+    assert f"user config files : {tmp_home / '.mambarc'}" in infos
+    assert f"base environment : {tmp_root_prefix}" in infos
+
+
+@pytest.mark.parametrize("existing_prefix", [False, True])
+@pytest.mark.parametrize("prefix_selection", [None, "env_var", "prefix", "name"])
+def test_not_env(tmp_home, tmp_root_prefix, prefix_selection, existing_prefix):
+    name = "not_an_env"
+    prefix = tmp_root_prefix / "envs" / name
+
+    if existing_prefix:
+        prefix.mkdir(parents=True, exist_ok=False)
+
+    if prefix_selection == "prefix":
+        infos = helpers.info("-p", prefix)
+    elif prefix_selection == "name":
+        infos = helpers.info("-n", name)
+    elif prefix_selection == "env_var":
+        os.environ["CONDA_PREFIX"] = str(prefix)
+        infos = helpers.info()
+    else:
+        os.environ.pop("CONDA_PREFIX", "")
+        infos = helpers.info()
+
+    if prefix_selection is None:
+        expected_name = "None"
+        location = "-"
+    elif prefix_selection == "env_var":
+        expected_name = name + " (active)"
+        location = prefix
+    else:
+        if existing_prefix:
+            expected_name = name + " (not env)"
+        else:
+            expected_name = name + " (not found)"
+        location = prefix
+
+    assert f"environment : {expected_name}" in infos
+    assert f"env location : {location}" in infos
+    assert f"user config files : {tmp_home / '.mambarc'}" in infos
+    assert f"base environment : {tmp_root_prefix}" in infos


@@ -1,28 +1,18 @@
-import json
 import os
-import platform
-import random
 import shutil
-import stat
-import string
 import subprocess
 from pathlib import Path
 from typing import Optional
 
 import pytest
 
-from .helpers import *
-
-if platform.system() == "Windows":
-    xtensor_hpp = "Library/include/xtensor/xtensor.hpp"
-else:
-    xtensor_hpp = "include/xtensor/xtensor.hpp"
+from . import helpers
 
 
-def cache_file(cache: Path, pkg_name: str) -> Optional[Path]:
-    tar_bz2 = cache / (pkg_name + ".tar.bz2")
-    conda = cache / (pkg_name + ".conda")
-    print("Checking for", tar_bz2, conda)
+def find_cache_archive(cache: Path, pkg_name: str) -> Optional[Path]:
+    """Find the archive used in cache from the complete build name."""
+    tar_bz2 = cache / f"{pkg_name}.tar.bz2"
+    conda = cache / f"{pkg_name}.conda"
     if tar_bz2.exists():
         return tar_bz2
     elif conda.exists():
@@ -30,178 +20,198 @@ def cache_file(cache: Path, pkg_name: str) -> Optional[Path]:
     return None
 
 
+def find_pkg_build(cache: Path, name: str) -> str:
+    """Find the build name of a package in the cache from the package name."""
+    matches = [p for p in cache.glob(f"{name}*") if p.is_dir()]
+    assert len(matches) == 1
+    return matches[0].name
+
+
+@pytest.fixture(scope="module")
+def tmp_shared_cache_xtensor(tmp_path_factory: pytest.TempPathFactory):
+    """Create shared cache folder with an xtensor package."""
+    root = tmp_path_factory.mktemp("xtensor")
+    helpers.create(
+        "-n",
+        "xtensor",
+        "--no-env",
+        "--no-rc",
+        "-r",
+        root,
+        "-c",
+        "conda-forge",
+        "--no-deps",
+        "xtensor",
+        no_dry_run=True,
+    )
+    return root / "pkgs"
+
+
+@pytest.fixture(params=[True])
+def tmp_cache_writable(request) -> bool:
+    """A dummy fixture to control the writability of ``tmp_cache``."""
+    return request.param
+
+
+@pytest.fixture
+def tmp_cache(
+    tmp_root_prefix: Path, tmp_shared_cache_xtensor: Path, tmp_cache_writable: bool
+) -> Path:
+    """The default cache folder associated with the root_prefix and an xtensor package."""
+    cache: Path = tmp_root_prefix / "pkgs"
+    shutil.copytree(tmp_shared_cache_xtensor, cache, dirs_exist_ok=True)
+    if not tmp_cache_writable:
+        helpers.recursive_chmod(cache, 0o500)
+    return cache
+
+
+@pytest.fixture
+def tmp_cache_xtensor_dir(tmp_cache: Path) -> Path:
+    """The location of the Xtensor cache directory within the package cache."""
+    return tmp_cache / find_pkg_build(tmp_cache, "xtensor")
+
+
+@pytest.fixture
+def tmp_cache_xtensor_pkg(tmp_cache: Path) -> Path:
+    """The location of the Xtensor cache artifact (tarball) within the cache directory."""
+    return find_cache_archive(tmp_cache, find_pkg_build(tmp_cache, "xtensor"))
+
+
+@pytest.fixture
+def tmp_cache_xtensor_hpp(tmp_cache_xtensor_dir: Path) -> Path:
+    """The location of the Xtensor header (part of the package) within the cache directory."""
+    return tmp_cache_xtensor_dir / helpers.xtensor_hpp
+
+
 class TestPkgCache:
-    current_root_prefix = os.environ.get("MAMBA_ROOT_PREFIX", "")
-    current_prefix = os.environ.get("CONDA_PREFIX", "")
-    cache = os.path.join(current_root_prefix, "pkgs")
-
-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
-    prefix = os.path.join(root_prefix, "envs", env_name)
-
-    stat_xf = None
-    stat_orig = None
-    pkg_name = None
-    orig_file_path = None
-
-    @classmethod
-    @pytest.fixture
-    def cache(cls, existing_cache, test_pkg):
-        cache = Path(os.path.expanduser(os.path.join("~", "cache" + random_string())))
-        os.makedirs(cache)
-        link_dir(cache, existing_cache)
-        os.environ["CONDA_PKGS_DIRS"] = str(cache)
-        yield cache
-        if cache.exists():
-            os.chmod(cache, 0o700)
-            os.chmod(cache / test_pkg, 0o700)
-            os.chmod(cache / test_pkg / xtensor_hpp, 0o700)
-            rmtree(cache)
-
-    @classmethod
-    @pytest.fixture
-    def cached_file(cls, cache, test_pkg):
-        return cache / test_pkg / xtensor_hpp
-
-    @classmethod
-    def setup_class(cls):
-        os.environ["MAMBA_ROOT_PREFIX"] = TestPkgCache.root_prefix
-        os.environ["CONDA_PREFIX"] = TestPkgCache.prefix
-
-    @classmethod
-    def teardown_class(cls):
-        os.environ["MAMBA_ROOT_PREFIX"] = TestPkgCache.current_root_prefix
-        os.environ["CONDA_PREFIX"] = TestPkgCache.current_prefix
-        if "CONDA_PKGS_DIRS" in os.environ:
-            os.environ.pop("CONDA_PKGS_DIRS")
-        if Path(TestPkgCache.root_prefix).exists():
-            rmtree(TestPkgCache.root_prefix)
-
-    @classmethod
-    def teardown_method(cls):
-        envs_dir = os.path.join(TestPkgCache.root_prefix, "envs")
-        if Path(envs_dir).exists():
-            rmtree(envs_dir)
-
-    def test_extracted_file_deleted(self, cached_file):
-        old_ino = cached_file.stat().st_ino
-        os.remove(cached_file)
-
-        env = "x1"
-        create("xtensor", "-n", env, "--json", no_dry_run=True)
-        linked_file = get_env(env, xtensor_hpp)
-        assert linked_file.exists()
-
-        linked_file_stats = linked_file.stat()
-        assert cached_file.stat().st_dev == linked_file_stats.st_dev
-        assert cached_file.stat().st_ino == linked_file_stats.st_ino
-        assert old_ino != linked_file_stats.st_ino
-
-    @pytest.mark.parametrize("safety_checks", ["disabled", "warn", "enabled"])
-    def test_extracted_file_corrupted(self, safety_checks, cached_file):
-        old_ino = cached_file.stat().st_ino
-
-        with open(cached_file, "w") as f:
-            f.write("//corruption")
-
-        env = "x1"
-        create(
-            "xtensor",
-            "-n",
-            env,
-            "--json",
-            "--safety-checks",
-            safety_checks,
-            no_dry_run=True,
-        )
-        linked_file = get_env(env, xtensor_hpp)
-        assert linked_file.exists()
-
-        linked_file_stats = linked_file.stat()
-        assert cached_file.stat().st_dev == linked_file_stats.st_dev
-        assert cached_file.stat().st_ino == linked_file_stats.st_ino
-        if safety_checks == "enabled":
-            assert old_ino != linked_file_stats.st_ino
-        else:
-            assert old_ino == linked_file_stats.st_ino
-
-    def test_tarball_deleted(self, cached_file, test_pkg, cache):
-        tarball = cache_file(cache, test_pkg)
-        assert tarball.exists()
-        os.remove(tarball)
-
-        env = "x1"
-        create("xtensor", "-n", env, "--json", no_dry_run=True)
-        linked_file = get_env(env, xtensor_hpp)
-        assert linked_file.exists()
-
-        linked_file_stats = linked_file.stat()
-        assert not (tarball).exists()
-        assert cached_file.stat().st_dev == linked_file_stats.st_dev
-        assert cached_file.stat().st_ino == linked_file_stats.st_ino
-
-    def test_tarball_and_extracted_file_deleted(self, cache, test_pkg, cached_file):
-        tarball = cache_file(cache, test_pkg)
-        tarball_size = tarball.stat().st_size
-        old_ino = cached_file.stat().st_ino
-        os.remove(cached_file)
-        os.remove(tarball)
-
-        env = "x1"
-        create("xtensor", "-n", env, "--json", no_dry_run=True)
-        linked_file = get_env(env, xtensor_hpp)
-        assert linked_file.exists()
-
-        linked_file_stats = linked_file.stat()
-        assert tarball.exists()
-        assert tarball_size == tarball.stat().st_size
-        assert cached_file.stat().st_dev == linked_file_stats.st_dev
-        assert cached_file.stat().st_ino == linked_file_stats.st_ino
-        assert old_ino != linked_file_stats.st_ino
-
-    def test_tarball_corrupted_and_extracted_file_deleted(
-        self, cache, test_pkg, cached_file
-    ):
-        tarball = cache_file(cache, test_pkg)
-        tarball_size = tarball.stat().st_size
-        old_ino = cached_file.stat().st_ino
-        os.remove(cached_file)
-        os.remove(tarball)
-        with open(tarball, "w") as f:
-            f.write("")
-
-        env = "x1"
-        create("xtensor", "-n", env, "--json", no_dry_run=True)
-        linked_file = get_env(env, xtensor_hpp)
-        assert linked_file.exists()
-
-        linked_file_stats = linked_file.stat()
-        assert tarball.exists()
-        assert tarball_size == tarball.stat().st_size
-        assert cached_file.stat().st_dev == linked_file_stats.st_dev
-        assert cached_file.stat().st_ino == linked_file_stats.st_ino
-        assert old_ino != linked_file_stats.st_ino
-
-    @pytest.mark.parametrize("safety_checks", ("disabled", "warn", "enabled"))
-    def test_extracted_file_corrupted_no_perm(
-        self, cache, cached_file, test_pkg, safety_checks
-    ):
-        with open(cached_file, "w") as f:
-            f.write("//corruption")
-        recursive_chmod(cache / test_pkg, 0o500)
-        old_ino = cached_file.stat().st_ino
+    def test_extracted_file_deleted(
+        self, tmp_home, tmp_cache_xtensor_hpp, tmp_root_prefix
+    ):
+        old_ino = tmp_cache_xtensor_hpp.stat().st_ino
+        os.remove(tmp_cache_xtensor_hpp)
+
+        env_name = "some_env"
+        helpers.create("xtensor", "-n", env_name, no_dry_run=True)
+
+        linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
+        assert linked_file.exists()
+
+        linked_file_stats = linked_file.stat()
+        assert tmp_cache_xtensor_hpp.stat().st_dev == linked_file_stats.st_dev
+        assert tmp_cache_xtensor_hpp.stat().st_ino == linked_file_stats.st_ino
+        assert old_ino != linked_file_stats.st_ino
+
+    @pytest.mark.parametrize("safety_checks", ["disabled", "warn", "enabled"])
+    def test_extracted_file_corrupted(
+        self, tmp_home, tmp_root_prefix, tmp_cache_xtensor_hpp, safety_checks
+    ):
+        old_ino = tmp_cache_xtensor_hpp.stat().st_ino
+
+        with open(tmp_cache_xtensor_hpp, "w") as f:
+            f.write("//corruption")
+
+        env_name = "x1"
+        helpers.create(
+            "xtensor",
+            "-n",
+            env_name,
+            "--json",
+            "--safety-checks",
+            safety_checks,
+            no_dry_run=True,
+        )
+
+        linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
+        assert linked_file.exists()
+
+        linked_file_stats = linked_file.stat()
+        assert tmp_cache_xtensor_hpp.stat().st_dev == linked_file_stats.st_dev
+        assert tmp_cache_xtensor_hpp.stat().st_ino == linked_file_stats.st_ino
+        if safety_checks == "enabled":
+            assert old_ino != linked_file_stats.st_ino
+        else:
+            assert old_ino == linked_file_stats.st_ino
+
+    def test_tarball_deleted(
+        self,
+        tmp_home,
+        tmp_root_prefix,
+        tmp_cache_xtensor_pkg,
+        tmp_cache_xtensor_hpp,
+        tmp_cache,
+    ):
+        assert tmp_cache_xtensor_pkg.exists()
+        os.remove(tmp_cache_xtensor_pkg)
+
+        env_name = "x1"
+        helpers.create("xtensor", "-n", env_name, "--json", no_dry_run=True)
+
+        linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
+        assert linked_file.exists()
+
+        linked_file_stats = linked_file.stat()
+        assert not tmp_cache_xtensor_pkg.exists()
+        assert tmp_cache_xtensor_hpp.stat().st_dev == linked_file_stats.st_dev
+        assert tmp_cache_xtensor_hpp.stat().st_ino == linked_file_stats.st_ino
+
+    def test_tarball_and_extracted_file_deleted(
+        self, tmp_home, tmp_root_prefix, tmp_cache_xtensor_pkg, tmp_cache_xtensor_hpp
+    ):
+        xtensor_pkg_size = tmp_cache_xtensor_pkg.stat().st_size
+        old_ino = tmp_cache_xtensor_hpp.stat().st_ino
+        os.remove(tmp_cache_xtensor_hpp)
+        os.remove(tmp_cache_xtensor_pkg)
+
+        env_name = "x1"
+        helpers.create("xtensor", "-n", env_name, "--json", no_dry_run=True)
+
+        linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
+        assert linked_file.exists()
+
+        linked_file_stats = linked_file.stat()
+        assert tmp_cache_xtensor_pkg.exists()
+        assert xtensor_pkg_size == tmp_cache_xtensor_pkg.stat().st_size
+        assert tmp_cache_xtensor_hpp.stat().st_dev == linked_file_stats.st_dev
+        assert tmp_cache_xtensor_hpp.stat().st_ino == linked_file_stats.st_ino
+        assert old_ino != linked_file_stats.st_ino
+
+    def test_tarball_corrupted_and_extracted_file_deleted(
+        self, tmp_home, tmp_root_prefix, tmp_cache_xtensor_pkg, tmp_cache_xtensor_hpp
+    ):
+        xtensor_pkg_size = tmp_cache_xtensor_pkg.stat().st_size
+        old_ino = tmp_cache_xtensor_hpp.stat().st_ino
+        os.remove(tmp_cache_xtensor_hpp)
+        os.remove(tmp_cache_xtensor_pkg)
+        with open(tmp_cache_xtensor_pkg, "w") as f:
+            f.write("")
+
+        env_name = "x1"
+        helpers.create("xtensor", "-n", env_name, "--json", no_dry_run=True)
+
+        linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
+        assert linked_file.exists()
+
+        linked_file_stats = linked_file.stat()
+        assert tmp_cache_xtensor_pkg.exists()
+        assert xtensor_pkg_size == tmp_cache_xtensor_pkg.stat().st_size
+        assert tmp_cache_xtensor_hpp.stat().st_dev == linked_file_stats.st_dev
+        assert tmp_cache_xtensor_hpp.stat().st_ino == linked_file_stats.st_ino
+        assert old_ino != linked_file_stats.st_ino
+
+    @pytest.mark.parametrize("safety_checks", ("disabled", "warn", "enabled"))
+    def test_extracted_file_corrupted_no_perm(
+        self,
+        tmp_home,
+        tmp_root_prefix,
+        tmp_cache_xtensor_pkg,
+        tmp_cache_xtensor_hpp,
+        safety_checks,
+    ):
+        with open(tmp_cache_xtensor_hpp, "w") as f:
+            f.write("//corruption")
+        helpers.recursive_chmod(tmp_cache_xtensor_pkg, 0o500)
+        # old_ino = tmp_cache_xtensor_hpp.stat().st_ino
 
         env = "x1"
 
         cmd_args = (
@@ -215,181 +225,132 @@ class TestPkgCache:
         )
 
         with pytest.raises(subprocess.CalledProcessError):
-            create(*cmd_args, no_dry_run=True)
+            helpers.create(*cmd_args, no_dry_run=True)
 
 
+@pytest.fixture
+def tmp_cache_alt(tmp_root_prefix: Path, tmp_shared_cache_xtensor: Path) -> Path:
+    """Make an alternative package cache outside the root prefix."""
+    cache = (
+        tmp_root_prefix / "more-pkgs"
+    )  # Creating under root prefix to leverage eager cleanup
+    shutil.copytree(tmp_shared_cache_xtensor, cache, dirs_exist_ok=True)
+    return cache
+
+
+def repodata_json(cache: Path) -> set[Path]:
+    return set((cache / "cache").glob("*.json"))
+
+
+def repodata_solv(cache: Path) -> set[Path]:
+    return set((cache / "cache").glob("*.solv"))
+
+
+def same_repodata_json_solv(cache: Path):
+    return {p.stem for p in repodata_json(cache)} == {
+        p.stem for p in repodata_solv(cache)
+    }
+
+
 class TestMultiplePkgCaches:
-    current_root_prefix = os.environ.get("MAMBA_ROOT_PREFIX", "")
-    current_prefix = os.environ.get("CONDA_PREFIX", "")
-    cache = os.path.join(current_root_prefix, "pkgs")
-
-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
-    prefix = os.path.join(root_prefix, "envs", env_name)
-
-    @staticmethod
-    @pytest.fixture
-    def cache1(existing_cache, first_cache_is_writable):
-        cache = Path(os.path.expanduser(os.path.join("~", "cache" + random_string())))
-        os.makedirs(cache)
-        if first_cache_is_writable:
-            link_dir(cache, existing_cache)
-        else:
-            recursive_chmod(cache, 0o500)
-        yield cache
-        if cache.exists():
-            rmtree(cache)
-
-    @staticmethod
-    @pytest.fixture
-    def cache2(existing_cache, first_cache_is_writable):
-        cache = Path(os.path.expanduser(os.path.join("~", "cache" + random_string())))
-        os.makedirs(cache)
-        link_dir(cache, existing_cache)
-        yield cache
-        if cache.exists():
-            rmtree(cache)
-
-    @staticmethod
-    @pytest.fixture
-    def used_cache(cache1, cache2, first_cache_is_writable):
-        if first_cache_is_writable:
-            yield cache1
-        else:
-            yield cache2
-
-    @staticmethod
-    @pytest.fixture
-    def unused_cache(cache1, cache2, first_cache_is_writable):
-        if first_cache_is_writable:
-            yield cache2
-        else:
-            yield cache1
-
-    @classmethod
-    def setup_class(cls):
-        os.environ["MAMBA_ROOT_PREFIX"] = TestMultiplePkgCaches.root_prefix
-        os.environ["CONDA_PREFIX"] = TestMultiplePkgCaches.prefix
-        if "CONDA_PKGS_DIRS" in os.environ:
-            os.environ.pop("CONDA_PKGS_DIRS")
-
-    @classmethod
-    def teardown_class(cls):
-        os.environ["MAMBA_ROOT_PREFIX"] = TestMultiplePkgCaches.current_root_prefix
-        os.environ["CONDA_PREFIX"] = TestMultiplePkgCaches.current_prefix
-        if Path(TestMultiplePkgCaches.root_prefix).exists():
-            rmtree(TestMultiplePkgCaches.root_prefix)
-
-    @classmethod
-    def teardown_method(cls):
-        if "CONDA_PKGS_DIRS" in os.environ:
-            os.environ.pop("CONDA_PKGS_DIRS")
-
-    @pytest.mark.parametrize("cache", (pytest.lazy_fixture(("cache1", "cache2"))))
-    def test_different_caches(self, cache):
+    @pytest.mark.parametrize(
+        "cache", (pytest.lazy_fixture(("tmp_cache", "tmp_cache_alt")))
+    )
+    def test_different_caches(self, tmp_home, tmp_root_prefix, cache):
         os.environ["CONDA_PKGS_DIRS"] = f"{cache}"
 
-        env_name = TestMultiplePkgCaches.env_name
-        res = create("-n", env_name, "xtensor", "-v", "--json", no_dry_run=True)
-
-        linked_file = get_env(env_name, xtensor_hpp)
+        env_name = "some_env"
+        res = helpers.create("-n", env_name, "xtensor", "-v", "--json", no_dry_run=True)
+
+        linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
         assert linked_file.exists()
 
-        pkg_name = get_concrete_pkg(res, "xtensor")
-        cache_file = cache / pkg_name / xtensor_hpp
+        pkg_name = helpers.get_concrete_pkg(res, "xtensor")
+        cache_file = cache / pkg_name / helpers.xtensor_hpp
 
         assert cache_file.exists()
 
         assert linked_file.stat().st_dev == cache_file.stat().st_dev
         assert linked_file.stat().st_ino == cache_file.stat().st_ino
 
-    @pytest.mark.parametrize("first_cache_is_writable", (False, True))
+    @pytest.mark.parametrize("tmp_cache_writable", [False, True], indirect=True)
     def test_first_writable(
-        self, first_cache_is_writable, cache1, cache2, used_cache, unused_cache
+        self,
+        tmp_home,
+        tmp_root_prefix,
+        tmp_cache_writable,
+        tmp_cache,
+        tmp_cache_alt,
     ):
-        os.environ["CONDA_PKGS_DIRS"] = f"{cache1},{cache2}"
-
-        env_name = TestMultiplePkgCaches.env_name
-
-        res = create(
-            "-n",
-            env_name,
-            "xtensor",
-            "--json",
-            no_dry_run=True,
-        )
-
-        linked_file = get_env(env_name, xtensor_hpp)
+        os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
+
+        env_name = "some_env"
+        res = helpers.create("-n", env_name, "xtensor", "--json", no_dry_run=True)
+
+        linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
        assert linked_file.exists()
 
-        pkg_name = get_concrete_pkg(res, "xtensor")
-        cache_file = used_cache / pkg_name / xtensor_hpp
+        pkg_name = helpers.get_concrete_pkg(res, "xtensor")
+        # A previous version of this test was attempting to test that the installed file
+        # was linked from the first writable pkgs dir, however it passed only because of
+        # a bug in how it used pytest.
+        # The first pkgs dir can be used to link, even if it is not writable.
+        cache_file = tmp_cache / pkg_name / helpers.xtensor_hpp
 
         assert cache_file.exists()
 
         assert linked_file.stat().st_dev == cache_file.stat().st_dev
         assert linked_file.stat().st_ino == cache_file.stat().st_ino
 
-    def test_no_writable(self, cache1, cache2, test_pkg):
-        rmtree(cache1 / test_pkg)
-        # will also chmod cache2 since they are both
-        # hardlinks to the same files
-        recursive_chmod(cache1, 0o500)
-
-        os.environ["CONDA_PKGS_DIRS"] = f"{cache1},{cache2}"
-        env_name = TestMultiplePkgCaches.env_name
-
-        create("-n", env_name, "xtensor", "--json", no_dry_run=True)
+    def test_no_writable(self, tmp_home, tmp_root_prefix, tmp_cache, tmp_cache_alt):
+        helpers.rmtree(tmp_cache / find_pkg_build(tmp_cache, "xtensor"))
+        helpers.recursive_chmod(tmp_cache, 0o500)
+
+        os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
+
+        helpers.create("-n", "myenv", "xtensor", "--json", no_dry_run=True)
 
     def test_no_writable_extracted_dir_corrupted(
-        self, cache1, test_pkg, repodata_files
+        self, tmp_home, tmp_root_prefix, tmp_cache
     ):
-        # will also chmod cache2 since they are both
-        # hardlinks to the same files
-        rmtree(cache1 / test_pkg / xtensor_hpp)
-        recursive_chmod(cache1, 0o500)
-
-        os.environ["CONDA_PKGS_DIRS"] = f"{cache1}"
-        env_name = TestMultiplePkgCaches.env_name
+        (
+            tmp_cache / find_pkg_build(tmp_cache, "xtensor") / helpers.xtensor_hpp
+        ).unlink()
+        helpers.recursive_chmod(tmp_cache, 0o500)
+
+        os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache}"
 
         with pytest.raises(subprocess.CalledProcessError):
-            create("-n", env_name, "xtensor", "-vv", "--json", no_dry_run=True)
+            helpers.create("-n", "myenv", "xtensor", "-vv", "--json", no_dry_run=True)
 
     def test_first_writable_extracted_dir_corrupted(
-        self, cache1, cache2, test_pkg, repodata_files
+        self, tmp_home, tmp_root_prefix, tmp_cache, tmp_cache_alt
     ):
-        rmtree(cache1)  # convenience for cache teardown
-        os.makedirs(cache1)
-        open(cache1 / "urls.txt", "w")  # chmod only set read-only flag on Windows
-        recursive_chmod(cache1, 0o500)
-        rmtree(cache2 / test_pkg / xtensor_hpp)
-
-        os.environ["CONDA_PKGS_DIRS"] = f"{cache1},{cache2}"
-        env_name = TestMultiplePkgCaches.env_name
-
-        create("-n", env_name, "xtensor", "-vv", "--json", no_dry_run=True)
-
-        linked_file = get_env(env_name, xtensor_hpp)
+        xtensor_bld = find_pkg_build(tmp_cache, "xtensor")
+        helpers.rmtree(tmp_cache)  # convenience for cache teardown
+        os.makedirs(tmp_cache)
+        open(tmp_cache / "urls.txt", "w")  # chmod only set read-only flag on Windows
+        helpers.recursive_chmod(tmp_cache, 0o500)
+        helpers.rmtree(tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp)
+
+        os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
+        env_name = "myenv"
+
+        helpers.create("-n", env_name, "xtensor", "-vv", "--json", no_dry_run=True)
+
+        linked_file = helpers.get_env(env_name, helpers.xtensor_hpp)
         assert linked_file.exists()
 
-        non_writable_cache_file = cache1 / test_pkg / xtensor_hpp
-        writable_cache_file = cache2 / test_pkg / xtensor_hpp
-
         # check repodata files
-        for f in repodata_files:
-            for ext in ["json", "solv"]:
-                assert not (cache1 / "cache" / (f + "." + ext)).exists()
-                assert (cache2 / "cache" / (f + "." + ext)).exists()
+        assert repodata_json(tmp_cache) == set()
+        assert repodata_json(tmp_cache_alt) != set()
+        assert same_repodata_json_solv(tmp_cache_alt)
 
         # check tarballs
-        assert cache_file(cache1, test_pkg) is None
-        assert cache_file(cache2, test_pkg).exists()
+        assert find_cache_archive(tmp_cache, xtensor_bld) is None
+        assert find_cache_archive(tmp_cache_alt, xtensor_bld).exists()
+
+        non_writable_cache_file = tmp_cache / xtensor_bld / helpers.xtensor_hpp
+        writable_cache_file = tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp
 
         # check extracted files
         assert not non_writable_cache_file.exists()
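
Note how `test_first_writable` above drives the writability of `tmp_cache` through `indirect=True`: the parametrized values are routed into the fixture's `request.param` rather than passed directly as the test argument. A self-contained sketch of that mechanism (names here are illustrative, not from the commit):

    import os

    import pytest


    @pytest.fixture(params=[True])
    def writable(request) -> bool:
        # Default is True; tests can override it via indirect parametrization.
        return request.param


    @pytest.fixture
    def cache_dir(writable, tmp_path):
        d = tmp_path / "cache"
        d.mkdir()
        if not writable:
            d.chmod(0o500)  # read/execute only
        return d


    @pytest.mark.parametrize("writable", [False, True], indirect=True)
    def test_cache_dir_writability(cache_dir, writable):
        # May not hold when running as root, which ignores file permissions.
        assert os.access(cache_dir, os.W_OK) == writable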
@@ -400,35 +361,37 @@ class TestMultiplePkgCaches:
         assert linked_file.stat().st_ino == writable_cache_file.stat().st_ino
 
     def test_extracted_tarball_only_in_non_writable_cache(
-        self, cache1, cache2, test_pkg, repodata_files
+        self,
+        tmp_root_prefix,
+        tmp_home,
+        tmp_cache,
+        tmp_cache_alt,
+        tmp_cache_xtensor_dir,
     ):
-        tarball = cache_file(cache1, test_pkg)
-        rmtree(tarball)
-        # this will chmod 700 the hardlinks and have to be done before chmod cache1
-        rmtree(cache2)
-        recursive_chmod(cache1, 0o500)
-
-        os.environ["CONDA_PKGS_DIRS"] = f"{cache1},{cache2}"
-        env_name = TestMultiplePkgCaches.env_name
-
-        create("-n", env_name, "xtensor", "--json", no_dry_run=True)
-
-        linked_file = get_env(env_name, xtensor_hpp)
+        xtensor_bld = find_pkg_build(tmp_cache, "xtensor")
+        helpers.rmtree(find_cache_archive(tmp_cache, xtensor_bld))
+        helpers.rmtree(tmp_cache_alt)
+        helpers.recursive_chmod(tmp_cache, 0o500)
+
+        os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
+        env_name = "myenv"
+
+        helpers.create("-n", env_name, "xtensor", "--json", no_dry_run=True)
+
+        linked_file = helpers.get_env(env_name, helpers.xtensor_hpp)
         assert linked_file.exists()
 
-        non_writable_cache_file = cache1 / test_pkg / xtensor_hpp
-        writable_cache_file = cache2 / test_pkg / xtensor_hpp
-
         # check repodata files
-        for f in repodata_files:
-            for ext in ["json", "solv"]:
-                assert (cache1 / "cache" / (f + "." + ext)).exists()
-                assert not (cache2 / "cache" / (f + "." + ext)).exists()
+        assert repodata_json(tmp_cache) != set()
+        assert same_repodata_json_solv(tmp_cache)
+        assert repodata_json(tmp_cache_alt) == set()
 
         # check tarballs
-        assert cache_file(cache1, test_pkg) is None
-        assert cache_file(cache2, test_pkg) is None
+        assert find_cache_archive(tmp_cache, xtensor_bld) is None
+        assert find_cache_archive(tmp_cache_alt, xtensor_bld) is None
+
+        non_writable_cache_file = tmp_cache / xtensor_bld / helpers.xtensor_hpp
+        writable_cache_file = tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp
 
         # check extracted files
         assert non_writable_cache_file.exists()
@@ -439,32 +402,32 @@ class TestMultiplePkgCaches:
         assert linked_file.stat().st_ino == non_writable_cache_file.stat().st_ino
 
     def test_missing_extracted_dir_in_non_writable_cache(
-        self, cache1, cache2, test_pkg, repodata_files
+        self, tmp_home, tmp_root_prefix, tmp_cache, tmp_cache_alt
     ):
-        rmtree(cache1 / test_pkg)
-        rmtree(cache2)
-        recursive_chmod(cache1, 0o500)
-
-        os.environ["CONDA_PKGS_DIRS"] = f"{cache1},{cache2}"
-        env_name = TestMultiplePkgCaches.env_name
-
-        create("-n", env_name, "xtensor", "--json", no_dry_run=True)
-
-        linked_file = get_env(env_name, xtensor_hpp)
+        xtensor_bld = find_pkg_build(tmp_cache, "xtensor")
+        helpers.rmtree(tmp_cache / xtensor_bld)
+        helpers.rmtree(tmp_cache_alt)
+        helpers.recursive_chmod(tmp_cache, 0o500)
+
+        os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
+        env_name = "myenv"
+
+        helpers.create("-n", env_name, "xtensor", "--json", no_dry_run=True)
+
+        linked_file = helpers.get_env(env_name, helpers.xtensor_hpp)
         assert linked_file.exists()
 
-        non_writable_cache_file = cache1 / test_pkg / xtensor_hpp
-        writable_cache_file = cache2 / test_pkg / xtensor_hpp
+        non_writable_cache_file = tmp_cache / xtensor_bld / helpers.xtensor_hpp
+        writable_cache_file = tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp
 
         # check repodata files
-        for f in repodata_files:
-            for ext in ["json", "solv"]:
-                assert (cache1 / "cache" / (f + "." + ext)).exists()
-                assert not (cache2 / "cache" / (f + "." + ext)).exists()
+        assert repodata_json(tmp_cache) != set()
+        assert same_repodata_json_solv(tmp_cache)
+        assert repodata_json(tmp_cache_alt) == set()
 
         # check tarballs
-        assert cache_file(cache1, test_pkg).exists()
-        assert cache_file(cache2, test_pkg) is None
+        assert find_cache_archive(tmp_cache, xtensor_bld).exists()
+        assert find_cache_archive(tmp_cache_alt, xtensor_bld) is None
 
         # check extracted files
         assert not non_writable_cache_file.exists()
@@ -475,37 +438,37 @@ class TestMultiplePkgCaches:
         assert linked_file.stat().st_ino == writable_cache_file.stat().st_ino
 
     def test_corrupted_extracted_dir_in_non_writable_cache(
-        self, cache1, cache2, test_pkg, repodata_files
+        self, tmp_home, tmp_root_prefix, tmp_cache, tmp_cache_alt
    ):
-        rmtree(cache1 / test_pkg / xtensor_hpp)
-        rmtree(cache2)  # convenience for cache teardown
-        os.makedirs(cache2)
-        recursive_chmod(cache1, 0o500)
-
-        os.environ["CONDA_PKGS_DIRS"] = f"{cache1},{cache2}"
-        env_name = TestMultiplePkgCaches.env_name
-
-        create("-n", env_name, "-vv", "xtensor", "--json", no_dry_run=True)
-
-        linked_file = get_env(env_name, xtensor_hpp)
+        xtensor_bld = find_pkg_build(tmp_cache, "xtensor")
+        helpers.rmtree(tmp_cache / xtensor_bld / helpers.xtensor_hpp)
+        helpers.rmtree(tmp_cache_alt)  # convenience for cache teardown
+        os.makedirs(tmp_cache_alt)
+        helpers.recursive_chmod(tmp_cache, 0o500)
+
+        os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
+        env_name = "myenv"
+
+        helpers.create("-n", env_name, "-vv", "xtensor", "--json", no_dry_run=True)
+
+        linked_file = helpers.get_env(env_name, helpers.xtensor_hpp)
         assert linked_file.exists()
 
-        non_writable_cache_file = cache1 / test_pkg / xtensor_hpp
-        writable_cache_file = cache2 / test_pkg / xtensor_hpp
-
         # check repodata files
-        for f in repodata_files:
-            for ext in ["json", "solv"]:
-                assert (cache1 / "cache" / (f + "." + ext)).exists()
-                assert not (cache2 / "cache" / (f + "." + ext)).exists()
+        assert repodata_json(tmp_cache) != set()
+        assert same_repodata_json_solv(tmp_cache)
+        assert repodata_json(tmp_cache_alt) == set()
 
         # check tarballs
-        assert cache_file(cache1, test_pkg).exists()
-        assert cache_file(cache2, test_pkg) is None
+        assert find_cache_archive(tmp_cache, xtensor_bld).exists()
+        assert find_cache_archive(tmp_cache_alt, xtensor_bld) is None
 
         # check extracted dir
-        assert (cache1 / test_pkg).exists()
-        assert (cache2 / test_pkg).exists()
+        assert (tmp_cache / xtensor_bld).exists()
+        assert (tmp_cache_alt / xtensor_bld).exists()
+
+        non_writable_cache_file = tmp_cache / xtensor_bld / helpers.xtensor_hpp
+        writable_cache_file = tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp
 
         # check extracted files
         assert not non_writable_cache_file.exists()
@@ -516,15 +479,16 @@ class TestMultiplePkgCaches:
         assert linked_file.stat().st_ino == writable_cache_file.stat().st_ino
 
     def test_expired_but_valid_repodata_in_non_writable_cache(
-        self, cache1, cache2, test_pkg, repodata_files
+        self, tmp_home, tmp_root_prefix, tmp_cache, tmp_cache_alt
     ):
-        rmtree(cache2)
-        recursive_chmod(cache1, 0o500)
-
-        os.environ["CONDA_PKGS_DIRS"] = f"{cache1},{cache2}"
-        env_name = TestMultiplePkgCaches.env_name
-
-        create(
+        helpers.rmtree(tmp_cache_alt)
+        helpers.recursive_chmod(tmp_cache, 0o500)
+
+        os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
+        env_name = "myenv"
+        xtensor_bld = find_pkg_build(tmp_cache, "xtensor")
+
+        helpers.create(
             "-n",
             env_name,
             "xtensor",
@@ -534,25 +498,25 @@ class TestMultiplePkgCaches:
             no_dry_run=True,
         )
 
-        linked_file = get_env(env_name, xtensor_hpp)
+        linked_file = helpers.get_env(env_name, helpers.xtensor_hpp)
         assert linked_file.exists()
 
-        non_writable_cache_file = cache1 / test_pkg / xtensor_hpp
-        writable_cache_file = cache2 / test_pkg / xtensor_hpp
-
         # check repodata files
-        for f in repodata_files:
-            for ext in ["json", "solv"]:
-                assert (cache1 / "cache" / (f + "." + ext)).exists()
-                assert (cache2 / "cache" / (f + "." + ext)).exists()
+        assert repodata_json(tmp_cache) != set()
+        assert same_repodata_json_solv(tmp_cache)
+        assert repodata_json(tmp_cache_alt) != set()
+        assert same_repodata_json_solv(tmp_cache_alt)
 
         # check tarballs
-        assert cache_file(cache1, test_pkg).exists()
-        assert cache_file(cache2, test_pkg) is None
+        assert find_cache_archive(tmp_cache, xtensor_bld).exists()
+        assert find_cache_archive(tmp_cache_alt, xtensor_bld) is None
 
         # check extracted dir
-        assert (cache1 / test_pkg).exists()
-        assert not (cache2 / test_pkg).exists()
+        assert (tmp_cache / xtensor_bld).exists()
+        assert not (tmp_cache_alt / xtensor_bld).exists()
+
+        non_writable_cache_file = tmp_cache / xtensor_bld / helpers.xtensor_hpp
+        writable_cache_file = tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp
 
         # check extracted files
         assert non_writable_cache_file.exists()