Make integration tests not rely on specific organisation of packages (#3897)

Co-authored-by: Johan Mabille <johan.mabille@gmail.com>
Co-authored-by: Hind-M <70631848+Hind-M@users.noreply.github.com>
Co-authored-by: Julien Jerphanion <git@jjerphan.xyz>
Klaim (Joël Lamotte) 2025-05-20 13:47:29 +02:00 committed by GitHub
parent e807e690ee
commit b0da2730f3
6 changed files with 334 additions and 183 deletions
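The common pattern in this change: instead of building a hard-coded, platform-specific path to a known file (such as include/xtensor/containers/xtensor.hpp) and asserting on it, the tests create the environment and then ask a new PackageChecker helper to locate the file through the package's conda-meta manifest. A minimal sketch of the before/after pattern, assuming an environment env_name already created with helpers.create as in the tests below:

# Before: the test assumed a fixed layout inside the installed package.
linked_file = helpers.get_env(env_name, "include/xtensor/containers/xtensor.hpp")
assert linked_file.exists()

# After: the test asks the manifest-backed checker where the file actually is.
install_env_dir = helpers.get_env(env_name)
pkg_checker = helpers.PackageChecker("xtensor", install_env_dir)
linked_file = pkg_checker.find_installed("xtensor.hpp")
assert linked_file is not None
assert linked_file.exists()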

View File

@ -56,10 +56,6 @@ def lib_prefix() -> Path:
return Path("")
xtensor_hpp = lib_prefix() / "include/xtensor/containers/xtensor.hpp"
xsimd_hpp = lib_prefix() / "include/xsimd/xsimd.hpp"
def get_umamba(cwd=os.getcwd()):
if os.getenv("TEST_MAMBA_EXE"):
umamba = os.getenv("TEST_MAMBA_EXE")
@ -542,3 +538,87 @@ def create_with_chan_pkg(env_name, channels, package):
cmd.append(package)
return create(*cmd, default_channel=False, no_rc=False)
class PackageChecker:
# Provides integrity checking operations for an installed package, based on its manifest.
package_name: str
install_prefix_root_dir: Path
manifests_dir: Path
_manifest_info: dict
_manifest_json_path: Path
def __init__(
self, package_name: str, install_prefix_root_dir: Path, require_manifest: bool = True
):
# package_name: the name of the package to work with, without version or build string, for example 'xtensor'
# install_prefix_root_dir: the absolute path to the directory in which the package should be installed and found
self._require_manifest = require_manifest
assert install_prefix_root_dir.is_absolute()
assert package_name
self.package_name = package_name
assert install_prefix_root_dir
assert os.path.isdir(install_prefix_root_dir), (
f"not a directory or doesnt exist: {install_prefix_root_dir}"
)
self.install_prefix_root_dir = install_prefix_root_dir
if require_manifest:
self.manifests_dir = self.install_prefix_root_dir / "conda-meta"
assert os.path.isdir(self.manifests_dir), (
f"not a directory or doesnt exist: {self.manifests_dir}"
)
def check_install_integrity(self):
# Checks that the manifest of the package is installed and that every file listed in it
# exists. An assertion will fail otherwise.
manifest_info = self.get_manifest_info()
for file in manifest_info["files"]:
installed_file_path = self.install_prefix_root_dir.joinpath(file)
assert installed_file_path.is_file()
def get_manifest_info(self) -> dict:
# Looks for and reads the manifest file for the package and returns a dict with its content.
# If the manifest file is not found or if opening it fails, an assertion will fail.
if not hasattr(self, "_manifest_info") or not self._manifest_info:
manifest_json_paths = list(self.manifests_dir.glob(f"{self.package_name}-*.*.*-*.json"))
assert manifest_json_paths
assert len(manifest_json_paths) == 1
manifest_json_path = manifest_json_paths[0]
with open(manifest_json_path) as json_file:
self._manifest_info = json.load(json_file)
assert self._manifest_info
return self._manifest_info
def find_installed(self, name_or_relative_path: str) -> Path:
# Searches the package's manifest for a given file name or relative path that must have been installed.
# Returns the absolute path to that file once found, or None if not found.
# An assertion will fail if the file is found in the manifest but does not exist in the install directory.
if self._require_manifest:
manifest_info = self.get_manifest_info()
for file in manifest_info["files"]:
if file.endswith(name_or_relative_path):
absolute_path = self.install_prefix_root_dir.joinpath(file).absolute()
assert absolute_path.exists()
return absolute_path
else:
# We search for the file in the directory, without assuming it must exist in the manifest
for file in self.install_prefix_root_dir.glob(f"**/{name_or_relative_path}"):
return file.absolute()
return None
def get_name_version_build(self) -> str:
# Returns a name matching what `get_concrete_pkg` would return: `<package_name>-<version>-<build_string>`
manifest_info = self.get_manifest_info()
return f"{manifest_info['name']}-{manifest_info['version']}-{manifest_info['build_string']}"

View File

@ -807,14 +807,15 @@ def test_unicode_activation(
call(s3)
for u in [u1, u2, u3]:
assert (tmp_root_prefix / f"envs/{u}/conda-meta").is_dir()
assert (tmp_root_prefix / f"envs/{u}/conda-meta/history").exists()
install_prefix_root_dir = tmp_root_prefix / f"envs/{u}"
assert (install_prefix_root_dir / "conda-meta").is_dir()
assert (install_prefix_root_dir / "conda-meta/history").exists()
if plat == "win":
include_dir = tmp_root_prefix / f"envs/{u}/Library/include"
include_dir = install_prefix_root_dir / "Library/include"
else:
include_dir = tmp_root_prefix / f"envs/{u}/include"
assert (include_dir / "xtensor/containers/xtensor.hpp").exists()
include_dir = install_prefix_root_dir / "include"
assert include_dir.is_dir()
helpers.PackageChecker("xtensor", install_prefix_root_dir).check_install_integrity()
# unicode activation on win: todo
if plat == "win":

View File

@ -604,8 +604,8 @@ def test_classic_specs(tmp_home, tmp_root_prefix, tmp_path, outside_root_prefix)
if helpers.dry_run_tests == helpers.DryRun.OFF:
pkg_name = helpers.get_concrete_pkg(res, "xtensor")
cached_file = tmp_pkgs_dirs / pkg_name / helpers.xtensor_hpp
assert cached_file.exists()
pkg_checker = helpers.PackageChecker("xtensor", p)
assert pkg_name == pkg_checker.get_name_version_build()
@pytest.mark.parametrize("output_flag", ["", "--json", "--quiet"])

View File

@ -415,10 +415,9 @@ class TestInstall:
if not helpers.dry_run_tests:
pkg_name = helpers.get_concrete_pkg(res, "xtensor")
orig_file_path = helpers.get_pkg(
pkg_name, helpers.xtensor_hpp, TestInstall.current_root_prefix
)
assert orig_file_path.exists()
checker = helpers.PackageChecker("xtensor", TestInstall.current_root_prefix)
checker.check_install_integrity()
assert checker.get_name_version_build() == pkg_name
@pytest.mark.skipif(
helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,

View File

@ -9,10 +9,8 @@ import pytest
from .helpers import * # noqa: F403
from . import helpers
if platform.system() == "Windows":
xtensor_hpp = "Library/include/xtensor/containers/xtensor.hpp"
else:
xtensor_hpp = "include/xtensor/containers/xtensor.hpp"
package_to_test = "xtensor"
file_in_package_to_test = "xtensor.hpp"
class TestLinking:
@ -41,32 +39,40 @@ class TestLinking:
helpers.rmtree(TestLinking.prefix)
def test_link(self, existing_cache, test_pkg):
helpers.create("xtensor", "-n", TestLinking.env_name, "--json", no_dry_run=True)
helpers.create(package_to_test, "-n", TestLinking.env_name, "--json", no_dry_run=True)
linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
assert linked_file.exists()
assert not linked_file.is_symlink()
install_env_dir = helpers.get_env(TestLinking.env_name)
pkg_checker = helpers.PackageChecker(package_to_test, install_env_dir)
linked_file_path = pkg_checker.find_installed(file_in_package_to_test)
assert linked_file_path
assert linked_file_path.exists()
assert not linked_file_path.is_symlink()
cache_file = existing_cache / test_pkg / xtensor_hpp
assert cache_file.stat().st_dev == linked_file.stat().st_dev
assert cache_file.stat().st_ino == linked_file.stat().st_ino
linked_file_rel_path = linked_file_path.relative_to(install_env_dir)
cache_file = existing_cache / test_pkg / linked_file_rel_path
assert cache_file.stat().st_dev == linked_file_path.stat().st_dev
assert cache_file.stat().st_ino == linked_file_path.stat().st_ino
def test_copy(self, existing_cache, test_pkg):
helpers.create(
"xtensor",
package_to_test,
"-n",
TestLinking.env_name,
"--json",
"--always-copy",
no_dry_run=True,
)
linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
assert linked_file.exists()
assert not linked_file.is_symlink()
install_env_dir = helpers.get_env(TestLinking.env_name)
pkg_checker = helpers.PackageChecker(package_to_test, install_env_dir)
linked_file_path = pkg_checker.find_installed(file_in_package_to_test)
assert linked_file_path
assert linked_file_path.exists()
assert not linked_file_path.is_symlink()
cache_file = existing_cache / test_pkg / xtensor_hpp
assert cache_file.stat().st_dev == linked_file.stat().st_dev
assert cache_file.stat().st_ino != linked_file.stat().st_ino
linked_file_rel_path = linked_file_path.relative_to(install_env_dir)
cache_file = existing_cache / test_pkg / linked_file_rel_path
assert cache_file.stat().st_dev == linked_file_path.stat().st_dev
assert cache_file.stat().st_ino != linked_file_path.stat().st_ino
@pytest.mark.skipif(
platform.system() == "Windows",
@ -74,23 +80,26 @@ class TestLinking:
)
def test_always_softlink(self, existing_cache, test_pkg):
helpers.create(
"xtensor",
package_to_test,
"-n",
TestLinking.env_name,
"--json",
"--always-softlink",
no_dry_run=True,
)
linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
install_env_dir = helpers.get_env(TestLinking.env_name)
pkg_checker = helpers.PackageChecker(package_to_test, install_env_dir)
linked_file_path = pkg_checker.find_installed(file_in_package_to_test)
assert linked_file_path
assert linked_file_path.exists()
assert linked_file_path.is_symlink()
assert linked_file.exists()
assert linked_file.is_symlink()
linked_file_rel_path = linked_file_path.relative_to(install_env_dir)
cache_file = existing_cache / test_pkg / linked_file_rel_path
cache_file = existing_cache / test_pkg / xtensor_hpp
assert cache_file.stat().st_dev == linked_file.stat().st_dev
assert cache_file.stat().st_ino == linked_file.stat().st_ino
assert os.readlink(linked_file) == str(cache_file)
assert cache_file.stat().st_dev == linked_file_path.stat().st_dev
assert cache_file.stat().st_ino == linked_file_path.stat().st_ino
assert os.readlink(linked_file_path) == str(cache_file)
@pytest.mark.parametrize("allow_softlinks", [True, False])
@pytest.mark.parametrize("always_copy", [True, False])
@ -98,7 +107,7 @@ class TestLinking:
if platform.system() != "Linux":
pytest.skip("o/s is not linux")
create_args = ["xtensor", "-n", TestLinking.env_name, "--json"]
create_args = [package_to_test, "-n", TestLinking.env_name, "--json"]
if allow_softlinks:
create_args.append("--allow-softlinks")
if always_copy:
@ -109,23 +118,29 @@ class TestLinking:
is_softlink = not same_device and allow_softlinks and not always_copy
is_hardlink = same_device and not always_copy
linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
assert linked_file.exists()
install_env_dir = helpers.get_env(TestLinking.env_name)
pkg_checker = helpers.PackageChecker(package_to_test, install_env_dir)
linked_file_path = pkg_checker.find_installed(file_in_package_to_test)
assert linked_file_path
assert linked_file_path.exists()
cache_file = existing_cache / test_pkg / xtensor_hpp
assert cache_file.stat().st_dev == linked_file.stat().st_dev
assert (cache_file.stat().st_ino == linked_file.stat().st_ino) == is_hardlink
assert linked_file.is_symlink() == is_softlink
linked_file_rel_path = linked_file_path.relative_to(install_env_dir)
cache_file = existing_cache / test_pkg / linked_file_rel_path
assert cache_file.stat().st_dev == linked_file_path.stat().st_dev
assert (cache_file.stat().st_ino == linked_file_path.stat().st_ino) == is_hardlink
assert linked_file_path.is_symlink() == is_softlink
def test_unlink_missing_file(self):
helpers.create("xtensor", "-n", TestLinking.env_name, "--json", no_dry_run=True)
helpers.create(package_to_test, "-n", TestLinking.env_name, "--json", no_dry_run=True)
linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
assert linked_file.exists()
assert not linked_file.is_symlink()
pkg_checker = helpers.PackageChecker(package_to_test, helpers.get_env(TestLinking.env_name))
linked_file_path = pkg_checker.find_installed(file_in_package_to_test)
assert linked_file_path
assert linked_file_path.exists()
assert not linked_file_path.is_symlink()
os.remove(linked_file)
helpers.remove("xtensor", "-n", TestLinking.env_name)
os.remove(linked_file_path)
helpers.remove(package_to_test, "-n", TestLinking.env_name)
@pytest.mark.skipif(
sys.platform == "darwin" and platform.machine() == "arm64",

View File

@ -2,6 +2,7 @@ import os
import platform
import shutil
import subprocess
import glob
from pathlib import Path
from typing import Optional
@ -9,6 +10,9 @@ import pytest
from . import helpers
package_to_check = "xtensor"
file_to_find_in_package = "xtensor.hpp"
def find_cache_archive(cache: Path, pkg_name: str) -> Optional[Path]:
"""Find the archive used in cache from the complete build name."""
@ -29,12 +33,12 @@ def find_pkg_build(cache: Path, name: str) -> str:
@pytest.fixture(scope="module")
def tmp_shared_cache_xtensor(tmp_path_factory: pytest.TempPathFactory):
"""Create shared cache folder with an xtensor package."""
root = tmp_path_factory.mktemp("xtensor")
def tmp_shared_cache_test_pkg(tmp_path_factory: pytest.TempPathFactory):
"""Create shared cache folder with a test package."""
root = tmp_path_factory.mktemp(package_to_check)
helpers.create(
"-n",
"xtensor",
package_to_check,
"--no-env",
"--no-rc",
"-r",
@ -42,7 +46,7 @@ def tmp_shared_cache_xtensor(tmp_path_factory: pytest.TempPathFactory):
"-c",
"conda-forge",
"--no-deps",
"xtensor",
package_to_check,
no_dry_run=True,
)
return root / "pkgs"
@ -56,62 +60,69 @@ def tmp_cache_writable(request) -> bool:
@pytest.fixture
def tmp_cache(
tmp_root_prefix: Path, tmp_shared_cache_xtensor: Path, tmp_cache_writable: bool
tmp_root_prefix: Path, tmp_shared_cache_test_pkg: Path, tmp_cache_writable: bool
) -> Path:
"""The default cache folder associated with the root_prefix and an xtensor package."""
"""The default cache folder associated with the root_prefix and a test package."""
cache: Path = tmp_root_prefix / "pkgs"
shutil.copytree(tmp_shared_cache_xtensor, cache, dirs_exist_ok=True)
shutil.copytree(tmp_shared_cache_test_pkg, cache, dirs_exist_ok=True)
if not tmp_cache_writable:
helpers.recursive_chmod(cache, 0o500)
return cache
@pytest.fixture
def tmp_cache_xtensor_dir(tmp_cache: Path) -> Path:
"""The location of the Xtensor cache directory within the package cache."""
return tmp_cache / find_pkg_build(tmp_cache, "xtensor")
def tmp_cache_test_package_dir(tmp_cache: Path) -> Path:
"""The location of the package-t-test's cache directory within the package cache."""
return tmp_cache / find_pkg_build(tmp_cache, package_to_check)
@pytest.fixture
def tmp_cache_xtensor_pkg(tmp_cache: Path) -> Path:
"""The location of the Xtensor cache artifact (tarball) within the cache directory."""
return find_cache_archive(tmp_cache, find_pkg_build(tmp_cache, "xtensor"))
def tmp_cache_test_pkg(tmp_cache: Path) -> Path:
"""The location of the package-to-test's cache artifact (tarball) within the cache directory."""
return find_cache_archive(tmp_cache, find_pkg_build(tmp_cache, package_to_check))
@pytest.fixture
def tmp_cache_xtensor_hpp(tmp_cache_xtensor_dir: Path) -> Path:
"""The location of the Xtensor header (part of the package) within the cache directory."""
return tmp_cache_xtensor_dir / helpers.xtensor_hpp
def tmp_cache_file_in_test_package(tmp_cache_test_package_dir: Path) -> Path:
"""The location of the file in the package to test within the cache directory."""
pkg_checker = helpers.PackageChecker(
package_to_check, tmp_cache_test_package_dir, require_manifest=False
)
return pkg_checker.find_installed(file_to_find_in_package)
class TestPkgCache:
def test_extracted_file_deleted(self, tmp_home, tmp_cache_xtensor_hpp, tmp_root_prefix):
old_ino = tmp_cache_xtensor_hpp.stat().st_ino
os.remove(tmp_cache_xtensor_hpp)
def test_extracted_file_deleted(
self, tmp_home, tmp_cache_file_in_test_package, tmp_root_prefix
):
old_ino = tmp_cache_file_in_test_package.stat().st_ino
os.remove(tmp_cache_file_in_test_package)
env_name = "some_env"
helpers.create("xtensor", "-n", env_name, no_dry_run=True)
helpers.create(package_to_check, "-n", env_name, no_dry_run=True)
linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
env_dir = tmp_root_prefix / "envs" / env_name
pkg_checker = helpers.PackageChecker(package_to_check, env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
linked_file_stats = linked_file.stat()
assert tmp_cache_xtensor_hpp.stat().st_dev == linked_file_stats.st_dev
assert tmp_cache_xtensor_hpp.stat().st_ino == linked_file_stats.st_ino
assert tmp_cache_file_in_test_package.stat().st_dev == linked_file_stats.st_dev
assert tmp_cache_file_in_test_package.stat().st_ino == linked_file_stats.st_ino
assert old_ino != linked_file_stats.st_ino
@pytest.mark.parametrize("safety_checks", ["disabled", "warn", "enabled"])
def test_extracted_file_corrupted(
self, tmp_home, tmp_root_prefix, tmp_cache_xtensor_hpp, safety_checks
self, tmp_home, tmp_root_prefix, tmp_cache_file_in_test_package, safety_checks
):
old_ino = tmp_cache_xtensor_hpp.stat().st_ino
old_ino = tmp_cache_file_in_test_package.stat().st_ino
with open(tmp_cache_xtensor_hpp, "w") as f:
with open(tmp_cache_file_in_test_package, "w") as f:
f.write("//corruption")
env_name = "x1"
helpers.create(
"xtensor",
package_to_check,
"-n",
env_name,
"--json",
@ -120,12 +131,14 @@ class TestPkgCache:
no_dry_run=True,
)
linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
env_dir = tmp_root_prefix / "envs" / env_name
pkg_checker = helpers.PackageChecker(package_to_check, env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
linked_file_stats = linked_file.stat()
assert tmp_cache_xtensor_hpp.stat().st_dev == linked_file_stats.st_dev
assert tmp_cache_xtensor_hpp.stat().st_ino == linked_file_stats.st_ino
assert tmp_cache_file_in_test_package.stat().st_dev == linked_file_stats.st_dev
assert tmp_cache_file_in_test_package.stat().st_ino == linked_file_stats.st_ino
if safety_checks == "enabled":
assert old_ino != linked_file_stats.st_ino
@ -136,66 +149,72 @@ class TestPkgCache:
self,
tmp_home,
tmp_root_prefix,
tmp_cache_xtensor_pkg,
tmp_cache_xtensor_hpp,
tmp_cache_test_pkg,
tmp_cache_file_in_test_package,
tmp_cache,
):
assert tmp_cache_xtensor_pkg.exists()
os.remove(tmp_cache_xtensor_pkg)
assert tmp_cache_test_pkg.exists()
os.remove(tmp_cache_test_pkg)
env_name = "x1"
helpers.create("xtensor", "-n", env_name, "--json", no_dry_run=True)
helpers.create(package_to_check, "-n", env_name, "--json", no_dry_run=True)
linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
env_dir = tmp_root_prefix / "envs" / env_name
pkg_checker = helpers.PackageChecker(package_to_check, env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
linked_file_stats = linked_file.stat()
assert not tmp_cache_xtensor_pkg.exists()
assert tmp_cache_xtensor_hpp.stat().st_dev == linked_file_stats.st_dev
assert tmp_cache_xtensor_hpp.stat().st_ino == linked_file_stats.st_ino
assert not tmp_cache_test_pkg.exists()
assert tmp_cache_file_in_test_package.stat().st_dev == linked_file_stats.st_dev
assert tmp_cache_file_in_test_package.stat().st_ino == linked_file_stats.st_ino
def test_tarball_and_extracted_file_deleted(
self, tmp_home, tmp_root_prefix, tmp_cache_xtensor_pkg, tmp_cache_xtensor_hpp
self, tmp_home, tmp_root_prefix, tmp_cache_test_pkg, tmp_cache_file_in_test_package
):
xtensor_pkg_size = tmp_cache_xtensor_pkg.stat().st_size
old_ino = tmp_cache_xtensor_hpp.stat().st_ino
os.remove(tmp_cache_xtensor_hpp)
os.remove(tmp_cache_xtensor_pkg)
test_pkg_size = tmp_cache_test_pkg.stat().st_size
old_ino = tmp_cache_file_in_test_package.stat().st_ino
os.remove(tmp_cache_file_in_test_package)
os.remove(tmp_cache_test_pkg)
env_name = "x1"
helpers.create("xtensor", "-n", env_name, "--json", no_dry_run=True)
helpers.create(package_to_check, "-n", env_name, "--json", no_dry_run=True)
linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
env_dir = tmp_root_prefix / "envs" / env_name
pkg_checker = helpers.PackageChecker(package_to_check, env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
linked_file_stats = linked_file.stat()
assert tmp_cache_xtensor_pkg.exists()
assert xtensor_pkg_size == tmp_cache_xtensor_pkg.stat().st_size
assert tmp_cache_xtensor_hpp.stat().st_dev == linked_file_stats.st_dev
assert tmp_cache_xtensor_hpp.stat().st_ino == linked_file_stats.st_ino
assert tmp_cache_test_pkg.exists()
assert test_pkg_size == tmp_cache_test_pkg.stat().st_size
assert tmp_cache_file_in_test_package.stat().st_dev == linked_file_stats.st_dev
assert tmp_cache_file_in_test_package.stat().st_ino == linked_file_stats.st_ino
assert old_ino != linked_file_stats.st_ino
def test_tarball_corrupted_and_extracted_file_deleted(
self, tmp_home, tmp_root_prefix, tmp_cache_xtensor_pkg, tmp_cache_xtensor_hpp
self, tmp_home, tmp_root_prefix, tmp_cache_test_pkg, tmp_cache_file_in_test_package
):
xtensor_pkg_size = tmp_cache_xtensor_pkg.stat().st_size
old_ino = tmp_cache_xtensor_hpp.stat().st_ino
os.remove(tmp_cache_xtensor_hpp)
os.remove(tmp_cache_xtensor_pkg)
with open(tmp_cache_xtensor_pkg, "w") as f:
test_pkg_size = tmp_cache_test_pkg.stat().st_size
old_ino = tmp_cache_file_in_test_package.stat().st_ino
os.remove(tmp_cache_file_in_test_package)
os.remove(tmp_cache_test_pkg)
with open(tmp_cache_test_pkg, "w") as f:
f.write("")
env_name = "x1"
helpers.create("xtensor", "-n", env_name, "--json", no_dry_run=True)
helpers.create(package_to_check, "-n", env_name, "--json", no_dry_run=True)
linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
env_dir = tmp_root_prefix / "envs" / env_name
pkg_checker = helpers.PackageChecker(package_to_check, env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
linked_file_stats = linked_file.stat()
assert tmp_cache_xtensor_pkg.exists()
assert xtensor_pkg_size == tmp_cache_xtensor_pkg.stat().st_size
assert tmp_cache_xtensor_hpp.stat().st_dev == linked_file_stats.st_dev
assert tmp_cache_xtensor_hpp.stat().st_ino == linked_file_stats.st_ino
assert tmp_cache_test_pkg.exists()
assert test_pkg_size == tmp_cache_test_pkg.stat().st_size
assert tmp_cache_file_in_test_package.stat().st_dev == linked_file_stats.st_dev
assert tmp_cache_file_in_test_package.stat().st_ino == linked_file_stats.st_ino
assert old_ino != linked_file_stats.st_ino
@pytest.mark.parametrize("safety_checks", ("disabled", "warn", "enabled"))
@ -203,18 +222,18 @@ class TestPkgCache:
self,
tmp_home,
tmp_root_prefix,
tmp_cache_xtensor_pkg,
tmp_cache_xtensor_hpp,
tmp_cache_test_pkg,
tmp_cache_file_in_test_package,
safety_checks,
):
with open(tmp_cache_xtensor_hpp, "w") as f:
with open(tmp_cache_file_in_test_package, "w") as f:
f.write("//corruption")
helpers.recursive_chmod(tmp_cache_xtensor_pkg, 0o500)
# old_ino = tmp_cache_xtensor_hpp.stat().st_ino
helpers.recursive_chmod(tmp_cache_test_pkg, 0o500)
# old_ino = tmp_cache_file_in_test_package.stat().st_ino
env = "x1"
cmd_args = (
"xtensor",
package_to_check,
"-n",
"--safety-checks",
safety_checks,
@ -228,10 +247,10 @@ class TestPkgCache:
@pytest.fixture
def tmp_cache_alt(tmp_root_prefix: Path, tmp_shared_cache_xtensor: Path) -> Path:
def tmp_cache_alt(tmp_root_prefix: Path, tmp_shared_cache_test_pkg: Path) -> Path:
"""Make an alternative package cache outside the root prefix."""
cache = tmp_root_prefix / "more-pkgs" # Creating under root prefix to leverage eager cleanup
shutil.copytree(tmp_shared_cache_xtensor, cache, dirs_exist_ok=True)
shutil.copytree(tmp_shared_cache_test_pkg, cache, dirs_exist_ok=True)
return cache
@ -257,13 +276,16 @@ class TestMultiplePkgCaches:
os.environ["CONDA_PKGS_DIRS"] = f"{cache}"
env_name = "some_env"
res = helpers.create("-n", env_name, "xtensor", "-v", "--json", no_dry_run=True)
res = helpers.create("-n", env_name, package_to_check, "-v", "--json", no_dry_run=True)
linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
env_dir = tmp_root_prefix / "envs" / env_name
pkg_checker = helpers.PackageChecker(package_to_check, env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
pkg_name = helpers.get_concrete_pkg(res, "xtensor")
cache_file = cache / pkg_name / helpers.xtensor_hpp
pkg_name = helpers.get_concrete_pkg(res, package_to_check)
installed_file_rel_path = linked_file.relative_to(env_dir)
cache_file = cache / pkg_name / installed_file_rel_path
assert cache_file.exists()
@ -282,17 +304,20 @@ class TestMultiplePkgCaches:
os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
env_name = "some_env"
res = helpers.create("-n", env_name, "xtensor", "--json", no_dry_run=True)
res = helpers.create("-n", env_name, package_to_check, "--json", no_dry_run=True)
linked_file = tmp_root_prefix / "envs" / env_name / helpers.xtensor_hpp
env_dir = tmp_root_prefix / "envs" / env_name
pkg_checker = helpers.PackageChecker(package_to_check, env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
pkg_name = helpers.get_concrete_pkg(res, "xtensor")
pkg_name = helpers.get_concrete_pkg(res, package_to_check)
installed_file_rel_path = linked_file.relative_to(env_dir)
# A previous version of this test attempted to check that the installed file
# was linked from the first writable pkgs dir; however, it passed only because of a bug
# in how it used pytest.
# The first pkgs dir can be used to link, even if it is not writable.
cache_file = tmp_cache / pkg_name / helpers.xtensor_hpp
cache_file = tmp_cache / pkg_name / installed_file_rel_path
assert cache_file.exists()
@ -300,38 +325,51 @@ class TestMultiplePkgCaches:
assert linked_file.stat().st_ino == cache_file.stat().st_ino
def test_no_writable(self, tmp_home, tmp_root_prefix, tmp_cache, tmp_cache_alt):
helpers.rmtree(tmp_cache / find_pkg_build(tmp_cache, "xtensor"))
helpers.rmtree(tmp_cache / find_pkg_build(tmp_cache, package_to_check))
helpers.recursive_chmod(tmp_cache, 0o500)
os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
helpers.create("-n", "myenv", "xtensor", "--json", no_dry_run=True)
helpers.create("-n", "myenv", package_to_check, "--json", no_dry_run=True)
def test_no_writable_extracted_dir_corrupted(self, tmp_home, tmp_root_prefix, tmp_cache):
(tmp_cache / find_pkg_build(tmp_cache, "xtensor") / helpers.xtensor_hpp).unlink()
old_cache_dir = tmp_cache / find_pkg_build(tmp_cache, package_to_check)
if old_cache_dir.is_dir():
files = glob.glob(
f"**/{file_to_find_in_package}", recursive=True, root_dir=old_cache_dir
)
# glob.glob returns paths relative to root_dir, so rebuild absolute paths before unlinking.
for file in files:
(old_cache_dir / file).unlink()
helpers.recursive_chmod(tmp_cache, 0o500)
os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache}"
with pytest.raises(subprocess.CalledProcessError):
helpers.create("-n", "myenv", "xtensor", "-vv", "--json", no_dry_run=True)
helpers.create("-n", "myenv", package_to_check, "-vv", "--json", no_dry_run=True)
def test_first_writable_extracted_dir_corrupted(
self, tmp_home, tmp_root_prefix, tmp_cache, tmp_cache_alt
):
xtensor_bld = find_pkg_build(tmp_cache, "xtensor")
test_pkg_bld = find_pkg_build(tmp_cache, package_to_check)
helpers.rmtree(tmp_cache) # convenience for cache teardown
os.makedirs(tmp_cache)
open(tmp_cache / "urls.txt", "w") # chmod only set read-only flag on Windows
helpers.recursive_chmod(tmp_cache, 0o500)
helpers.rmtree(tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp)
tmp_cache_alt_pkg_dir = tmp_cache_alt / test_pkg_bld
if tmp_cache_alt_pkg_dir.is_dir():
files = tmp_cache_alt_pkg_dir.glob(f"**/{file_to_find_in_package}")
for file in files:
helpers.rmtree(file)
os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
env_name = "myenv"
helpers.create("-n", env_name, "xtensor", "-vv", "--json", no_dry_run=True)
helpers.create("-n", env_name, package_to_check, "-vv", "--json", no_dry_run=True)
linked_file = helpers.get_env(env_name, helpers.xtensor_hpp)
install_env_dir = helpers.get_env(env_name)
pkg_checker = helpers.PackageChecker(package_to_check, install_env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
# check repodata files
@ -341,11 +379,12 @@ class TestMultiplePkgCaches:
assert same_repodata_json_solv(tmp_cache_alt)
# check tarballs
assert find_cache_archive(tmp_cache, xtensor_bld) is None
assert find_cache_archive(tmp_cache_alt, xtensor_bld).exists()
assert find_cache_archive(tmp_cache, test_pkg_bld) is None
assert find_cache_archive(tmp_cache_alt, test_pkg_bld).exists()
non_writable_cache_file = tmp_cache / xtensor_bld / helpers.xtensor_hpp
writable_cache_file = tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp
linked_file_rel_path = linked_file.relative_to(install_env_dir)
non_writable_cache_file = tmp_cache / test_pkg_bld / linked_file_rel_path
writable_cache_file = tmp_cache_alt / test_pkg_bld / linked_file_rel_path
# check extracted files
assert not non_writable_cache_file.exists()
@ -361,19 +400,21 @@ class TestMultiplePkgCaches:
tmp_home,
tmp_cache,
tmp_cache_alt,
tmp_cache_xtensor_dir,
tmp_cache_test_package_dir,
):
xtensor_bld = find_pkg_build(tmp_cache, "xtensor")
helpers.rmtree(find_cache_archive(tmp_cache, xtensor_bld))
test_pkg_bld = find_pkg_build(tmp_cache, package_to_check)
helpers.rmtree(find_cache_archive(tmp_cache, test_pkg_bld))
helpers.rmtree(tmp_cache_alt)
helpers.recursive_chmod(tmp_cache, 0o500)
os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
env_name = "myenv"
helpers.create("-n", env_name, "xtensor", "--json", no_dry_run=True)
helpers.create("-n", env_name, package_to_check, "--json", no_dry_run=True)
linked_file = helpers.get_env(env_name, helpers.xtensor_hpp)
install_env_dir = helpers.get_env(env_name)
pkg_checker = helpers.PackageChecker(package_to_check, install_env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
# check repodata files
@ -383,11 +424,12 @@ class TestMultiplePkgCaches:
assert repodata_json(tmp_cache_alt) == set()
# check tarballs
assert find_cache_archive(tmp_cache, xtensor_bld) is None
assert find_cache_archive(tmp_cache_alt, xtensor_bld) is None
assert find_cache_archive(tmp_cache, test_pkg_bld) is None
assert find_cache_archive(tmp_cache_alt, test_pkg_bld) is None
non_writable_cache_file = tmp_cache / xtensor_bld / helpers.xtensor_hpp
writable_cache_file = tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp
linked_file_rel_path = linked_file.relative_to(install_env_dir)
non_writable_cache_file = tmp_cache / test_pkg_bld / linked_file_rel_path
writable_cache_file = tmp_cache_alt / test_pkg_bld / linked_file_rel_path
# check extracted files
assert non_writable_cache_file.exists()
@ -400,21 +442,24 @@ class TestMultiplePkgCaches:
def test_missing_extracted_dir_in_non_writable_cache(
self, tmp_home, tmp_root_prefix, tmp_cache, tmp_cache_alt
):
xtensor_bld = find_pkg_build(tmp_cache, "xtensor")
helpers.rmtree(tmp_cache / xtensor_bld)
test_pkg_bld = find_pkg_build(tmp_cache, package_to_check)
helpers.rmtree(tmp_cache / test_pkg_bld)
helpers.rmtree(tmp_cache_alt)
helpers.recursive_chmod(tmp_cache, 0o500)
os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
env_name = "myenv"
helpers.create("-n", env_name, "xtensor", "--json", no_dry_run=True)
helpers.create("-n", env_name, package_to_check, "--json", no_dry_run=True)
linked_file = helpers.get_env(env_name, helpers.xtensor_hpp)
install_env_dir = helpers.get_env(env_name)
pkg_checker = helpers.PackageChecker(package_to_check, install_env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
non_writable_cache_file = tmp_cache / xtensor_bld / helpers.xtensor_hpp
writable_cache_file = tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp
linked_file_rel_path = linked_file.relative_to(install_env_dir)
non_writable_cache_file = tmp_cache / test_pkg_bld / linked_file_rel_path
writable_cache_file = tmp_cache_alt / test_pkg_bld / linked_file_rel_path
# check repodata files
assert repodata_json(tmp_cache) != set()
@ -423,8 +468,8 @@ class TestMultiplePkgCaches:
assert repodata_json(tmp_cache_alt) == set()
# check tarballs
assert find_cache_archive(tmp_cache, xtensor_bld).exists()
assert find_cache_archive(tmp_cache_alt, xtensor_bld) is None
assert find_cache_archive(tmp_cache, test_pkg_bld).exists()
assert find_cache_archive(tmp_cache_alt, test_pkg_bld) is None
# check extracted files
assert not non_writable_cache_file.exists()
@ -437,8 +482,13 @@ class TestMultiplePkgCaches:
def test_corrupted_extracted_dir_in_non_writable_cache(
self, tmp_home, tmp_root_prefix, tmp_cache, tmp_cache_alt
):
xtensor_bld = find_pkg_build(tmp_cache, "xtensor")
helpers.rmtree(tmp_cache / xtensor_bld / helpers.xtensor_hpp)
test_pkg_bld = find_pkg_build(tmp_cache, package_to_check)
tmp_cache_test_pkg_dir = tmp_cache / test_pkg_bld
if tmp_cache_test_pkg_dir.is_dir():
files = tmp_cache_test_pkg_dir.glob(f"**/{file_to_find_in_package}")
for file in files:
helpers.rmtree(file)
helpers.rmtree(tmp_cache_alt) # convenience for cache teardown
os.makedirs(tmp_cache_alt)
helpers.recursive_chmod(tmp_cache, 0o500)
@ -446,9 +496,11 @@ class TestMultiplePkgCaches:
os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
env_name = "myenv"
helpers.create("-n", env_name, "-vv", "xtensor", "--json", no_dry_run=True)
helpers.create("-n", env_name, "-vv", package_to_check, "--json", no_dry_run=True)
linked_file = helpers.get_env(env_name, helpers.xtensor_hpp)
install_env_dir = helpers.get_env(env_name)
pkg_checker = helpers.PackageChecker(package_to_check, install_env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
# check repodata files
@ -458,15 +510,16 @@ class TestMultiplePkgCaches:
assert repodata_json(tmp_cache_alt) == set()
# check tarballs
assert find_cache_archive(tmp_cache, xtensor_bld).exists()
assert find_cache_archive(tmp_cache_alt, xtensor_bld) is None
assert find_cache_archive(tmp_cache, test_pkg_bld).exists()
assert find_cache_archive(tmp_cache_alt, test_pkg_bld) is None
# check extracted dir
assert (tmp_cache / xtensor_bld).exists()
assert (tmp_cache_alt / xtensor_bld).exists()
assert (tmp_cache / test_pkg_bld).exists()
assert (tmp_cache_alt / test_pkg_bld).exists()
non_writable_cache_file = tmp_cache / xtensor_bld / helpers.xtensor_hpp
writable_cache_file = tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp
linked_file_rel_path = linked_file.relative_to(install_env_dir)
non_writable_cache_file = tmp_cache / test_pkg_bld / linked_file_rel_path
writable_cache_file = tmp_cache_alt / test_pkg_bld / linked_file_rel_path
# check extracted files
assert not non_writable_cache_file.exists()
@ -484,19 +537,21 @@ class TestMultiplePkgCaches:
os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache},{tmp_cache_alt}"
env_name = "myenv"
xtensor_bld = find_pkg_build(tmp_cache, "xtensor")
test_pkg_bld = find_pkg_build(tmp_cache, package_to_check)
helpers.create(
"-n",
env_name,
"xtensor",
package_to_check,
"-vv",
"--json",
"--repodata-ttl=0",
no_dry_run=True,
)
linked_file = helpers.get_env(env_name, helpers.xtensor_hpp)
install_env_dir = helpers.get_env(env_name)
pkg_checker = helpers.PackageChecker(package_to_check, install_env_dir)
linked_file = pkg_checker.find_installed(file_to_find_in_package)
assert linked_file.exists()
# check repodata files
@ -508,15 +563,16 @@ class TestMultiplePkgCaches:
assert same_repodata_json_solv(tmp_cache_alt)
# check tarballs
assert find_cache_archive(tmp_cache, xtensor_bld).exists()
assert find_cache_archive(tmp_cache_alt, xtensor_bld) is None
assert find_cache_archive(tmp_cache, test_pkg_bld).exists()
assert find_cache_archive(tmp_cache_alt, test_pkg_bld) is None
# check extracted dir
assert (tmp_cache / xtensor_bld).exists()
assert not (tmp_cache_alt / xtensor_bld).exists()
assert (tmp_cache / test_pkg_bld).exists()
assert not (tmp_cache_alt / test_pkg_bld).exists()
non_writable_cache_file = tmp_cache / xtensor_bld / helpers.xtensor_hpp
writable_cache_file = tmp_cache_alt / xtensor_bld / helpers.xtensor_hpp
linked_file_rel_path = linked_file.relative_to(install_env_dir)
non_writable_cache_file = tmp_cache / test_pkg_bld / linked_file_rel_path
writable_cache_file = tmp_cache_alt / test_pkg_bld / linked_file_rel_path
# check extracted files
assert non_writable_cache_file.exists()