Move to Ruff (#3011)

* Use Ruff LSP

* Remove unused imports

* More Ruff fixes

* Remove unused test vars

* Explicit import

* Explicit import

* Explicit import

* Explicit import

* Explicit import

* Warning fixes

* Import fixtures

* Add ruff pre-commit

* Remove pre-commit checks superseded by ruff

* Fix imports

* Fix ruff warning

* Fix test_linking fixtures

* Fix typo

* Fix test_update fixture import

* Python line-length to 100

* Reformat Python code line length

* Fix typo
This commit is contained in:
Antoine Prouvost 2023-11-27 19:10:34 +01:00 committed by GitHub
parent 7f325df6ad
commit 1230b92094
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
31 changed files with 427 additions and 698 deletions

View File

@ -1,15 +1,5 @@
exclude: libmamba/tests/data/repodata_json_cache*
repos:
- repo: https://github.com/psf/black
rev: 23.9.1
hooks:
- id: black
args: [--safe, --quiet]
- repo: https://github.com/asottile/blacken-docs
rev: 1.16.0
hooks:
- id: blacken-docs
additional_dependencies: [black==22.3.0]
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
@ -26,22 +16,17 @@ repos:
args: [--autofix]
- id: debug-statements
language_version: python3
- repo: https://github.com/pre-commit/mirrors-isort
rev: v5.10.1
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.6
hooks:
- id: isort
exclude: tests/data
- repo: https://github.com/PyCQA/flake8
rev: 6.1.0
- id: ruff
args: [ --fix ]
- id: ruff-format
- repo: https://github.com/asottile/blacken-docs
rev: 1.16.0
hooks:
- id: flake8
language_version: python3
additional_dependencies:
- flake8-typing-imports==1.15.0
- flake8-builtins==2.1.0
- flake8-bugbear==23.9.16
- flake8-isort==6.1.0
exclude: libmambapy/src
- id: blacken-docs
additional_dependencies: [black==22.3.0]
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v16.0.6
hooks:

View File

@ -14,9 +14,9 @@ dependencies:
- sel(osx): lldb
- sel(linux): valgrind # Out of date on MacOS
# Python LSP support
- ruff
- python-lsp-server-base
- python-lsp-black
- black
- ruff-lsp
# Interactive Python tools
- jupyterlab
- ipython

View File

@ -51,9 +51,7 @@ def figure_wrapper(directive, node, caption):
figure_node["align"] = node.attributes.pop("align")
parsed = nodes.Element()
directive.state.nested_parse(
ViewList([caption], source=""), directive.content_offset, parsed
)
directive.state.nested_parse(ViewList([caption], source=""), directive.content_offset, parsed)
caption_node = nodes.caption(parsed[0].rawsource, "", *parsed[0].children)
caption_node.source = parsed[0].source
caption_node.line = parsed[0].line
@ -87,8 +85,7 @@ class Mermaid(Directive):
if self.content:
return [
document.reporter.warning(
"Mermaid directive cannot have both content and "
"a filename argument",
"Mermaid directive cannot have both content and " "a filename argument",
line=self.lineno,
)
]
@ -102,8 +99,7 @@ class Mermaid(Directive):
except (IOError, OSError): # noqa
return [
document.reporter.warning(
"External Mermaid file %r not found or reading "
"it failed" % filename,
"External Mermaid file %r not found or reading " "it failed" % filename,
line=self.lineno,
)
]
@ -144,9 +140,9 @@ def render_mm(self, code, options, fmt, prefix="mermaid"):
fmt = "png"
mermaid_cmd = self.builder.config.mermaid_cmd
hashkey = (
code + str(options) + str(self.builder.config.mermaid_sequence_config)
).encode("utf-8")
hashkey = (code + str(options) + str(self.builder.config.mermaid_sequence_config)).encode(
"utf-8"
)
basename = "%s-%s" % (prefix, sha1(hashkey).hexdigest())
fname = "%s.%s" % (basename, fmt)
@ -189,8 +185,7 @@ def render_mm(self, code, options, fmt, prefix="mermaid"):
if p.returncode != 0:
raise MermaidError(
"Mermaid exited with error:\n[stderr]\n%s\n"
"[stdout]\n%s" % (stderr, stdout)
"Mermaid exited with error:\n[stderr]\n%s\n" "[stdout]\n%s" % (stderr, stdout)
)
if not os.path.isfile(outfn):
raise MermaidError(
@ -200,9 +195,7 @@ def render_mm(self, code, options, fmt, prefix="mermaid"):
return relfn, outfn
def _render_mm_html_raw(
self, node, code, options, prefix="mermaid", imgcls=None, alt=None
):
def _render_mm_html_raw(self, node, code, options, prefix="mermaid", imgcls=None, alt=None):
if "align" in node:
tag_template = """<div align="{align}" class="mermaid align-{align}">
{code}
@ -213,9 +206,7 @@ def _render_mm_html_raw(
{code}
</div>"""
self.body.append(
tag_template.format(align=node.get("align"), code=self.encode(code))
)
self.body.append(tag_template.format(align=node.get("align"), code=self.encode(code)))
raise nodes.SkipNode
@ -229,8 +220,7 @@ def render_mm_html(self, node, code, options, prefix="mermaid", imgcls=None, alt
try:
if fmt not in ("png", "svg"):
raise MermaidError(
"mermaid_output_format must be one of 'raw', 'png', "
"'svg', but is %r" % fmt
"mermaid_output_format must be one of 'raw', 'png', " "'svg', but is %r" % fmt
)
fname, outfn = render_mm(self, code, options, fmt, prefix)
@ -295,8 +285,7 @@ def render_mm_latex(self, node, code, options, prefix="mermaid"):
if p.returncode != 0:
raise MermaidError(
"PdfCrop exited with error:\n[stderr]\n%s\n"
"[stdout]\n%s" % (stderr, stdout)
"PdfCrop exited with error:\n[stderr]\n%s\n" "[stdout]\n%s" % (stderr, stdout)
)
if not os.path.isfile(outfn):
raise MermaidError(
@ -304,9 +293,7 @@ def render_mm_latex(self, node, code, options, prefix="mermaid"):
"[stdout]\n%s" % (stderr, stdout)
)
fname = "{filename[0]}-crop{filename[1]}".format(
filename=os.path.splitext(fname)
)
fname = "{filename[0]}-crop{filename[1]}".format(filename=os.path.splitext(fname))
is_inline = self.is_inline(node)
if is_inline:

View File

@ -208,9 +208,7 @@ class MermaidDiagram(InheritanceDiagram):
return [figure]
def html_visit_mermaid_inheritance(
self: HTMLTranslator, node: inheritance_diagram
) -> None:
def html_visit_mermaid_inheritance(self: HTMLTranslator, node: inheritance_diagram) -> None:
"""
Output the graph for HTML. This will insert a PNG with clickable
image map.
@ -233,9 +231,7 @@ def html_visit_mermaid_inheritance(
urls[child["reftitle"]] = child.get("refuri")
elif child.get("refid") is not None:
if mermaid_output_format == "SVG":
urls[child["reftitle"]] = (
"../" + current_filename + "#" + child.get("refid")
)
urls[child["reftitle"]] = "../" + current_filename + "#" + child.get("refid")
else:
urls[child["reftitle"]] = "#" + child.get("refid")
dotcode = graph.generate_dot(name, urls, env=self.builder.env)
@ -251,9 +247,7 @@ def html_visit_mermaid_inheritance(
raise nodes.SkipNode
def latex_visit_mermaid_inheritance(
self: LaTeXTranslator, node: inheritance_diagram
) -> None:
def latex_visit_mermaid_inheritance(self: LaTeXTranslator, node: inheritance_diagram) -> None:
"""
Output the graph for LaTeX. This will insert a PDF.
"""
@ -271,9 +265,7 @@ def latex_visit_mermaid_inheritance(
raise nodes.SkipNode
def texinfo_visit_mermaid_inheritance(
self: TexinfoTranslator, node: inheritance_diagram
) -> None:
def texinfo_visit_mermaid_inheritance(self: TexinfoTranslator, node: inheritance_diagram) -> None:
"""
Output the graph for Texinfo. This will insert a PNG.
"""

View File

@ -48,17 +48,11 @@ def main():
parser = argparse.ArgumentParser(description="Generate binary header output")
parser.add_argument("-i", "--input", required=True, help="Input file", type=Path)
parser.add_argument("-o", "--out", required=True, help="Output file", type=Path)
parser.add_argument(
"-v", "--var", required=True, help="Variable name to use in file"
)
parser.add_argument(
"-e", "--extern", action="store_true", help="Add 'extern' declaration"
)
parser.add_argument("-v", "--var", required=True, help="Variable name to use in file")
parser.add_argument("-e", "--extern", action="store_true", help="Add 'extern' declaration")
args = parser.parse_args()
argv_pretty = " ".join(
Path(arg).name if "/" in arg or "\\" in arg else arg for arg in sys.argv
)
argv_pretty = " ".join(Path(arg).name if "/" in arg or "\\" in arg else arg for arg in sys.argv)
comment = f"/* This file was generated using {argv_pretty} */"
out = bin2header(comment, args.input.read_bytes(), args.var, args.extern)

View File

@ -1,5 +1,7 @@
import libmambapy.version
from libmambapy.bindings.legacy import * # Legacy which used to combine everything
# Legacy which used to combine everything
from libmambapy.bindings.legacy import * # noqa: F403
# Define top-level attributes
__version__ = libmambapy.version.__version__

View File

@ -977,9 +977,7 @@ class PackageInfo:
@typing.overload
def __init__(self, name: str) -> None: ...
@typing.overload
def __init__(
self, name: str, version: str, build_string: str, build_number: int
) -> None: ...
def __init__(self, name: str, version: str, build_string: str, build_number: int) -> None: ...
@property
def build_number(self) -> int:
"""
@ -1312,9 +1310,7 @@ class RootRole:
pass
class Solver:
def __init__(
self, arg0: Pool, arg1: typing.List[typing.Tuple[int, int]]
) -> None: ...
def __init__(self, arg0: Pool, arg1: typing.List[typing.Tuple[int, int]]) -> None: ...
def add_constraint(self, arg0: str) -> None: ...
def add_global_job(self, arg0: int) -> None: ...
def add_jobs(self, arg0: typing.List[str], arg1: int) -> None: ...
@ -1326,9 +1322,7 @@ class Solver:
def must_solve(self) -> None: ...
def problems_to_str(self) -> str: ...
def set_flags(self, arg0: typing.List[typing.Tuple[int, int]]) -> None: ...
def set_postsolve_flags(
self, arg0: typing.List[typing.Tuple[int, int]]
) -> None: ...
def set_postsolve_flags(self, arg0: typing.List[typing.Tuple[int, int]]) -> None: ...
def solve(self) -> bool: ...
def try_solve(self) -> bool: ...
pass

View File

@ -1,2 +0,0 @@
[pycodestyle]
ignore = E5,W1,W2,W3,W5

View File

@ -121,9 +121,7 @@ class RepoSigner:
fout.write(root_md_serialized_unsigned)
# This overwrites the file with a signed version of the file.
cct_root_signing.sign_root_metadata_via_gpg(
root_filepath, root_keys[0]["fingerprint"]
)
cct_root_signing.sign_root_metadata_via_gpg(root_filepath, root_keys[0]["fingerprint"])
# Load untrusted signed root metadata.
signed_root_md = cct_common.load_metadata_from_file(root_filepath)
@ -133,9 +131,7 @@ class RepoSigner:
print("[reposigner] Root metadata signed & verified!")
def create_key_mgr(self, keys):
private_key_key_mgr = cct_common.PrivateKey.from_hex(
keys["key_mgr"][0]["private"]
)
private_key_key_mgr = cct_common.PrivateKey.from_hex(keys["key_mgr"][0]["private"])
pkg_mgr_pub_keys = [k["public"] for k in keys["pkg_mgr"]]
key_mgr = cct_metadata_construction.build_delegating_metadata(
metadata_type="key_mgr", # 'root' or 'key_mgr'
@ -156,9 +152,7 @@ class RepoSigner:
# let's run a verification
root_metadata = cct_common.load_metadata_from_file(self.folder / "1.root.json")
key_mgr_metadata = cct_common.load_metadata_from_file(
self.folder / "key_mgr.json"
)
key_mgr_metadata = cct_common.load_metadata_from_file(self.folder / "key_mgr.json")
cct_common.checkformat_signable(root_metadata)
@ -168,9 +162,7 @@ class RepoSigner:
root_delegations = root_metadata["signed"]["delegations"] # for brevity
cct_common.checkformat_delegations(root_delegations)
if "key_mgr" not in root_delegations:
raise ValueError(
'Missing expected delegation to "key_mgr" in root metadata.'
)
raise ValueError('Missing expected delegation to "key_mgr" in root metadata.')
cct_common.checkformat_delegation(root_delegations["key_mgr"])
# Doing delegation processing.
@ -283,14 +275,10 @@ class ChannelHandler(SimpleHTTPRequestHandler):
self.wfile.write(b"no valid api key received")
global_parser = argparse.ArgumentParser(
description="Start a multi-channel conda package server."
)
global_parser = argparse.ArgumentParser(description="Start a multi-channel conda package server.")
global_parser.add_argument("-p", "--port", type=int, default=8000, help="Port to use.")
channel_parser = argparse.ArgumentParser(
description="Start a simple conda package server."
)
channel_parser = argparse.ArgumentParser(description="Start a simple conda package server.")
channel_parser.add_argument(
"-d",
"--directory",

View File

@ -93,9 +93,7 @@ def tmp_clean_env(tmp_environ: None) -> None:
if k.startswith(("CONDA", "_CONDA", "MAMBA", "_MAMBA", "XDG_")):
del os.environ[k]
def keep_in_path(
p: str, prefix: Optional[str] = tmp_environ.get("CONDA_PREFIX")
) -> bool:
def keep_in_path(p: str, prefix: Optional[str] = tmp_environ.get("CONDA_PREFIX")) -> bool:
if "condabin" in p:
return False
# On Windows, PATH is also used for dynamic libraries.

View File

@ -6,7 +6,6 @@ import random
import shutil
import string
import subprocess
import sys
from enum import Enum
from pathlib import Path
@ -36,9 +35,7 @@ class DryRun(Enum):
use_offline = False
channel = ["-c", "conda-forge"]
dry_run_tests = DryRun(
os.environ["MAMBA_DRY_RUN_TESTS"]
if ("MAMBA_DRY_RUN_TESTS" in os.environ)
else "OFF"
os.environ["MAMBA_DRY_RUN_TESTS"] if ("MAMBA_DRY_RUN_TESTS" in os.environ) else "OFF"
)
MAMBA_NO_PREFIX_CHECK = 1 << 0
@ -153,7 +150,7 @@ def install(*args, default_channel=True, no_rc=True, no_dry_run=False, **kwargs)
try:
j = json.loads(res)
return j
except:
except Exception:
print(res.decode())
return
if "--print-config-only" in args:

View File

@ -46,9 +46,7 @@ class WindowsProfiles:
"-Command",
"$PROFILE.CurrentUserAllHosts",
]
res = subprocess.run(
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True
)
res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
return res.stdout.decode("utf-8").strip()
elif shell == "cmd.exe":
return None
@ -159,9 +157,7 @@ def call_interpreter(s, tmp_path, interpreter, interactive=False, env=None):
if interpreter == "cmd.exe":
mods = ["@chcp 65001>nul"]
for x in s:
if x.startswith("micromamba activate") or x.startswith(
"micromamba deactivate"
):
if x.startswith("micromamba activate") or x.startswith("micromamba deactivate"):
mods.append("call " + x)
else:
mods.append(x)
@ -306,15 +302,15 @@ def test_shell_init(
interpreter,
):
# TODO enable these tests also on win + bash!
if interpreter not in valid_interpreters or (
plat == "win" and interpreter == "bash"
):
if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
pytest.skip(f"{interpreter} not available")
umamba = helpers.get_umamba()
run_dir = tmp_path / "rundir"
run_dir.mkdir()
call = lambda s: call_interpreter(s, run_dir, interpreter)
def call(s):
return call_interpreter(s, run_dir, interpreter)
rpv = shvar("MAMBA_ROOT_PREFIX", interpreter)
s = [f"echo {rpv}"]
@ -407,9 +403,7 @@ def test_shell_init_deinit_root_prefix_files(
tmp_path,
interpreter,
):
if interpreter not in valid_interpreters or (
plat == "win" and interpreter == "bash"
):
if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
pytest.skip(f"{interpreter} not available")
umamba = helpers.get_umamba()
@ -495,9 +489,7 @@ def test_shell_init_deinit_contents(
tmp_path,
interpreter,
):
if interpreter not in valid_interpreters or (
plat == "win" and interpreter == "bash"
):
if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
pytest.skip(f"{interpreter} not available")
umamba = helpers.get_umamba()
@ -542,9 +534,7 @@ def test_shell_init_deinit_contents(
@pytest.mark.parametrize("interpreter", get_interpreters())
def test_env_activation(tmp_home, winreg_value, tmp_root_prefix, tmp_path, interpreter):
if interpreter not in valid_interpreters or (
plat == "win" and interpreter == "bash"
):
if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
pytest.skip(f"{interpreter} not available")
umamba = helpers.get_umamba()
@ -552,7 +542,8 @@ def test_env_activation(tmp_home, winreg_value, tmp_root_prefix, tmp_path, inter
s = [f"{umamba} shell init -r {tmp_root_prefix}"]
stdout, stderr = call_interpreter(s, tmp_path, interpreter)
call = lambda s: call_interpreter(s, tmp_path, interpreter, interactive=True)
def call(s):
return call_interpreter(s, tmp_path, interpreter, interactive=True)
evars = extract_vars(["CONDA_PREFIX", "CONDA_SHLVL", "PATH"], interpreter)
@ -641,9 +632,7 @@ def test_activation_envvars(
tmp_path,
interpreter,
):
if interpreter not in valid_interpreters or (
plat == "win" and interpreter == "bash"
):
if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
pytest.skip(f"{interpreter} not available")
umamba = helpers.get_umamba()
@ -651,7 +640,8 @@ def test_activation_envvars(
s = [f"{umamba} shell init -r {tmp_root_prefix}"]
stdout, stderr = call_interpreter(s, tmp_path, interpreter)
call = lambda s: call_interpreter(s, tmp_path, interpreter, interactive=True)
def call(s):
return call_interpreter(s, tmp_path, interpreter, interactive=True)
evars = extract_vars(["CONDA_PREFIX", "CONDA_SHLVL", "PATH"], interpreter)
@ -760,9 +750,7 @@ def test_unicode_activation(
tmp_path,
interpreter,
):
if interpreter not in valid_interpreters or (
plat == "win" and interpreter == "bash"
):
if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
pytest.skip(f"{interpreter} not available")
umamba = helpers.get_umamba()
@ -770,7 +758,8 @@ def test_unicode_activation(
s = [f"{umamba} shell init -r {tmp_root_prefix}"]
stdout, stderr = call_interpreter(s, tmp_path, interpreter)
call = lambda s: call_interpreter(s, tmp_path, interpreter, interactive=True)
def call(s):
return call_interpreter(s, tmp_path, interpreter, interactive=True)
evars = extract_vars(["CONDA_PREFIX", "CONDA_SHLVL", "PATH"], interpreter)
@ -865,9 +854,7 @@ def test_unicode_activation(
@pytest.mark.parametrize("interpreter", get_interpreters())
def test_activate_path(tmp_empty_env, tmp_env_name, interpreter, tmp_path):
if interpreter not in valid_interpreters or (
plat == "win" and interpreter == "bash"
):
if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
pytest.skip(f"{interpreter} not available")
# Activate env name

View File

@ -116,9 +116,7 @@ class TestConfigSources:
@pytest.mark.parametrize(
"rc_file", (("home", "dummy.yaml"), ("home", ".mambarc")), indirect=True
)
@pytest.mark.parametrize(
"rc_file_args", ({"override_channels_enabled": True},), indirect=True
)
@pytest.mark.parametrize("rc_file_args", ({"override_channels_enabled": True},), indirect=True)
@pytest.mark.parametrize("quiet_flag", ["-q", "--quiet"])
@pytest.mark.parametrize("norc", [False, True])
def test_config_sources(self, rc_file, quiet_flag, norc):
@ -172,15 +170,11 @@ class TestConfigSources:
),
indirect=True,
)
@pytest.mark.parametrize(
"rc_file_args", ({"override_channels_enabled": True},), indirect=True
)
@pytest.mark.parametrize("rc_file_args", ({"override_channels_enabled": True},), indirect=True)
def test_config_rc_file(self, rc_file, tmp_env_name):
srcs = config("sources", "-n", tmp_env_name).strip().splitlines()
short_name = str(rc_file).replace(os.path.expanduser("~"), "~")
expected_srcs = (
f"Configuration files (by precedence order):\n{short_name}".splitlines()
)
expected_srcs = f"Configuration files (by precedence order):\n{short_name}".splitlines()
assert srcs == expected_srcs
@pytest.mark.parametrize(
@ -188,9 +182,7 @@ class TestConfigSources:
[("home", "somefile.yml")],
indirect=True,
)
@pytest.mark.parametrize(
"rc_file_args", ({"override_channels_enabled": True},), indirect=True
)
@pytest.mark.parametrize("rc_file_args", ({"override_channels_enabled": True},), indirect=True)
def test_config_expand_user(self, rc_file):
rc_file_short = str(rc_file).replace(os.path.expanduser("~"), "~")
res = config("sources", "--rc-file", rc_file)
@ -258,9 +250,7 @@ class TestConfigList:
)
assert (
config(
"list", "--no-env", "--rc-file", rc_file, "-d", group_flag
).splitlines()
config("list", "--no-env", "--rc-file", rc_file, "-d", group_flag).splitlines()
== f"{group}# channels\n# Define the list of channels\nchannels:\n"
" - channel1\n - channel2\n".splitlines()
)
@ -283,18 +273,14 @@ class TestConfigList:
os.environ["MAMBA_OFFLINE"] = "false"
assert (
config(
"list", "offline", "--no-rc", "--no-env", "-s", "--offline"
).splitlines()
config("list", "offline", "--no-rc", "--no-env", "-s", "--offline").splitlines()
== "offline: true # 'CLI'".splitlines()
)
os.environ.pop("MAMBA_OFFLINE")
def test_precedence(self):
rc_dir = os.path.expanduser(
os.path.join("~", "test_mamba", helpers.random_string())
)
rc_dir = os.path.expanduser(os.path.join("~", "test_mamba", helpers.random_string()))
os.makedirs(rc_dir, exist_ok=True)
rc_file = os.path.join(rc_dir, ".mambarc")
short_rc_file = rc_file.replace(os.path.expanduser("~"), "~")
@ -322,9 +308,7 @@ class TestConfigList:
)
assert (
config(
"list", "offline", f"--rc-file={rc_file}", "-s", "--offline"
).splitlines()
config("list", "offline", f"--rc-file={rc_file}", "-s", "--offline").splitlines()
== f"offline: true # 'CLI' > 'MAMBA_OFFLINE' > '{short_rc_file}'".splitlines()
)
assert (
@ -359,18 +343,12 @@ class TestConfigList:
class TestConfigModifiers:
def test_file_set_single_input(self, rc_file):
config("set", "json", "true", "--file", rc_file)
assert (
config("get", "json", "--file", rc_file).splitlines()
== "json: true".splitlines()
)
assert config("get", "json", "--file", rc_file).splitlines() == "json: true".splitlines()
def test_file_set_change_key_value(self, rc_file):
config("set", "json", "true", "--file", rc_file)
config("set", "json", "false", "--file", rc_file)
assert (
config("get", "json", "--file", rc_file).splitlines()
== "json: false".splitlines()
)
assert config("get", "json", "--file", rc_file).splitlines() == "json: false".splitlines()
def test_file_set_invalit_input(self, rc_file):
assert (
@ -721,9 +699,7 @@ class TestConfigExpandVars:
value = _expandvars(attr, "['${TEST_VAR}']", "foo")
assert value == ["foo"]
custom_channels = _expandvars(
"custom_channels", "{'x': '${TEST_VAR}'}", "http://foo"
)
custom_channels = _expandvars("custom_channels", "{'x': '${TEST_VAR}'}", "http://foo")
assert custom_channels["x"] == "http://foo"
custom_multichannels = _expandvars(
@ -793,9 +769,7 @@ class TestConfigExpandVars:
monkeypatch.setenv("foo", "bar", True)
monkeypatch.setenv("{foo", "baz1", True)
monkeypatch.setenv("{foo}", "baz2", True)
assert outp == self._roundtrip_attr(
rc_file, "channel_alias", yaml_quote + inp + yaml_quote
)
assert outp == self._roundtrip_attr(rc_file, "channel_alias", yaml_quote + inp + yaml_quote)
@pytest.mark.parametrize(
"inp,outp",
@ -840,9 +814,7 @@ class TestConfigExpandVars:
)
monkeypatch.setenv("CONDA_API_KEY", "kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk", True)
monkeypatch.setenv("CONDA_CHANNEL_UPLOAD_USER", "uuuuuuuuu", True)
monkeypatch.setenv(
"CONDA_CHANNEL_UPLOAD_PASSWORD", "pppppppppppppppppppp", True
)
monkeypatch.setenv("CONDA_CHANNEL_UPLOAD_PASSWORD", "pppppppppppppppppppp", True)
out = self._roundtrip(rc_file, condarc)
assert (
out["channel_alias"]

View File

@ -4,11 +4,11 @@ import os
import shutil
import subprocess
from .helpers import *
from . import helpers
def constructor(*args, default_channel=True, no_rc=True, no_dry_run=False):
umamba = get_umamba()
umamba = helpers.get_umamba()
cmd = [umamba, "constructor"] + [arg for arg in args if arg]
try:
@ -33,8 +33,8 @@ class TestInstall:
current_prefix = os.environ["CONDA_PREFIX"]
cache = os.path.join(current_root_prefix, "pkgs")
env_name = random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
env_name = helpers.random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
prefix = os.path.join(root_prefix, "envs", env_name)
new_cache = os.path.join(root_prefix, "pkgs")
@ -46,17 +46,13 @@ class TestInstall:
# speed-up the tests
os.environ["CONDA_PKGS_DIRS"] = TestInstall.new_cache
os.makedirs(TestInstall.new_cache, exist_ok=True)
root_pkgs = glob.glob(
os.path.join(TestInstall.current_root_prefix, "pkgs", "x*.tar.bz2")
)
root_pkgs = glob.glob(os.path.join(TestInstall.current_root_prefix, "pkgs", "x*.tar.bz2"))
urls = []
for pkg in root_pkgs:
shutil.copy(pkg, TestInstall.new_cache)
urls.append(
"http://testurl.com/conda-forge/linux-64/"
+ os.path.basename(pkg)
+ "#123412341234"
"http://testurl.com/conda-forge/linux-64/" + os.path.basename(pkg) + "#123412341234"
)
cls.pkgs = [os.path.basename(pkg) for pkg in root_pkgs]
@ -78,19 +74,12 @@ class TestInstall:
constructor("--prefix", TestInstall.root_prefix, "--extract-conda-pkgs")
for pkg in self.pkgs:
extracted_pkg = os.path.join(
TestInstall.root_prefix, "pkgs", pkg.rsplit(".tar.bz2")[0]
)
with open(
os.path.join(extracted_pkg, "info", "repodata_record.json")
) as rr:
extracted_pkg = os.path.join(TestInstall.root_prefix, "pkgs", pkg.rsplit(".tar.bz2")[0])
with open(os.path.join(extracted_pkg, "info", "repodata_record.json")) as rr:
repodata_record = json.load(rr)
with open(os.path.join(extracted_pkg, "info", "index.json")) as ri:
index = json.load(ri)
assert repodata_record["fn"] == pkg
assert repodata_record["md5"] == "123412341234"
assert (
repodata_record["url"]
== "http://testurl.com/conda-forge/linux-64/" + pkg
)
assert repodata_record["url"] == "http://testurl.com/conda-forge/linux-64/" + pkg
assert repodata_record["depends"] == index["depends"]

View File

@ -110,41 +110,31 @@ def test_lockfile(tmp_home, tmp_root_prefix, tmp_path):
assert res["success"]
packages = helpers.umamba_list("-p", env_prefix, "--json")
assert any(
package["name"] == "zlib" and package["version"] == "1.2.11"
for package in packages
)
assert any(package["name"] == "zlib" and package["version"] == "1.2.11" for package in packages)
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
def test_lockfile_online(tmp_home, tmp_root_prefix, tmp_path):
env_prefix = tmp_path / "myenv"
spec_file = "https://raw.githubusercontent.com/mamba-org/mamba/main/micromamba/tests/test_env-lock.yaml"
spec_file = (
"https://raw.githubusercontent.com/mamba-org/mamba/main/micromamba/tests/test_env-lock.yaml"
)
res = helpers.create("-p", env_prefix, "-f", spec_file, "--json")
assert res["success"]
packages = helpers.umamba_list("-p", env_prefix, "--json")
assert any(
package["name"] == "zlib" and package["version"] == "1.2.11"
for package in packages
)
assert any(package["name"] == "zlib" and package["version"] == "1.2.11" for package in packages)
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
def test_env_lockfile_different_install_after_create(
tmp_home, tmp_root_prefix, tmp_path
):
def test_env_lockfile_different_install_after_create(tmp_home, tmp_root_prefix, tmp_path):
env_prefix = tmp_path / "myenv"
create_spec_file = tmp_path / "env-create-lock.yaml"
install_spec_file = tmp_path / "env-install-lock.yaml"
shutil.copyfile(
__this_dir__ / "envlockfile-check-step-1-lock.yaml", create_spec_file
)
shutil.copyfile(
__this_dir__ / "envlockfile-check-step-2-lock.yaml", install_spec_file
)
shutil.copyfile(__this_dir__ / "envlockfile-check-step-1-lock.yaml", create_spec_file)
shutil.copyfile(__this_dir__ / "envlockfile-check-step-2-lock.yaml", install_spec_file)
res = helpers.create("-p", env_prefix, "-f", create_spec_file, "-y", "--json")
assert res["success"]
@ -292,9 +282,7 @@ def test_channels(tmp_home, tmp_root_prefix, tmp_path, cli, yaml, env_var, rc_fi
cmd += ["--rc-file", rc_file]
expected_channels += ["rc"]
res = helpers.create(
*cmd, "--print-config-only", no_rc=not rc_file, default_channel=False
)
res = helpers.create(*cmd, "--print-config-only", no_rc=not rc_file, default_channel=False)
check_create_result(res, tmp_root_prefix, env_prefix)
if expected_channels:
assert res["channels"] == expected_channels
@ -378,9 +366,7 @@ def test_multiprocessing():
"already_exists, is_conda_env", ((False, False), (True, False), (True, True))
)
@pytest.mark.parametrize("has_specs", (False, True))
def test_create_base(
tmp_home, tmp_root_prefix, already_exists, is_conda_env, has_specs
):
def test_create_base(tmp_home, tmp_root_prefix, already_exists, is_conda_env, has_specs):
if already_exists:
if is_conda_env:
(tmp_root_prefix / "conda-meta").mkdir()
@ -546,9 +532,7 @@ def test_always_yes(tmp_home, tmp_root_prefix, tmp_path, source):
)
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
@pytest.mark.parametrize("relocate_prefix", ["/home/bob/env", "/"])
def test_create_with_relocate_prefix(
tmp_home, tmp_root_prefix, tmp_path, relocate_prefix
):
def test_create_with_relocate_prefix(tmp_home, tmp_root_prefix, tmp_path, relocate_prefix):
env_prefix = tmp_path / "myenv"
res = helpers.create(
"-p",
@ -645,7 +629,7 @@ def test_spec_with_channel(tmp_home, tmp_root_prefix, tmp_path):
def test_spec_with_channel_and_subdir():
env_name = "myenv"
try:
res = helpers.create("-n", env_name, "conda-forge/noarch::xtensor", "--dry-run")
helpers.create("-n", env_name, "conda-forge/noarch::xtensor", "--dry-run")
except subprocess.CalledProcessError as e:
assert e.stderr.decode() == (
'critical libmamba The package "conda-forge/noarch::xtensor" is '
@ -719,9 +703,7 @@ def test_pin_applicable(tmp_home, tmp_root_prefix, tmp_path):
with open(rc_file, "w+") as f:
f.write(f"""pinned_packages: ["{pin_name}<={pin_max_version}"]""")
res = helpers.create(
"-n", "myenv", f"--rc-file={rc_file}", "--json", spec_name, no_rc=False
)
res = helpers.create("-n", "myenv", f"--rc-file={rc_file}", "--json", spec_name, no_rc=False)
install_pkg = None
for p in res["actions"]["LINK"]:
@ -741,9 +723,7 @@ def test_pin_not_applicable(tmp_home, tmp_root_prefix, tmp_path):
with open(rc_file, "w+") as f:
f.write(f"""pinned_packages: ["{pin_name}"]""")
res = helpers.create(
"-n", "myenv", f"--rc-file={rc_file}", "--json", spec_name, no_rc=False
)
res = helpers.create("-n", "myenv", f"--rc-file={rc_file}", "--json", spec_name, no_rc=False)
assert res["success"] is True
helpers.get_concrete_pkg(res, spec_name)  # Not throwing
@ -829,9 +809,7 @@ def test_create_check_dirs(tmp_home, tmp_root_prefix):
if platform.system() == "Windows":
assert os.path.isdir(env_prefix / "lib" / "site-packages" / "traitlets")
else:
assert os.path.isdir(
env_prefix / "lib" / "python3.8" / "site-packages" / "traitlets"
)
assert os.path.isdir(env_prefix / "lib" / "python3.8" / "site-packages" / "traitlets")
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
@ -843,9 +821,7 @@ def test_requires_pip_install(tmp_home, tmp_root_prefix, env_file):
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
@pytest.mark.parametrize("env_file", env_files)
def test_requires_pip_install_prefix_spaces(
tmp_home, tmp_root_prefix, tmp_path, env_file
):
def test_requires_pip_install_prefix_spaces(tmp_home, tmp_root_prefix, tmp_path, env_file):
env_prefix = tmp_path / "prefix with space"
cmd = ["-p", env_prefix, "-f", env_file]
helpers.create(*cmd)
@ -886,9 +862,7 @@ def test_pre_commit_compat(tmp_home, tmp_root_prefix, tmp_path):
helpers.subprocess_run("git", "config", "user.name", "test", cwd=path)
helpers.subprocess_run("git", "add", ".", cwd=path)
helpers.subprocess_run("git", "commit", "-m", "Initialize repo", cwd=path)
return helpers.subprocess_run(
"git", "rev-parse", "HEAD", cwd=path, text=True
).strip()
return helpers.subprocess_run("git", "rev-parse", "HEAD", cwd=path, text=True).strip()
hook_repo = tmp_path / "hook_repo"
caller_repo = tmp_path / "caller_repo"
@ -994,16 +968,12 @@ def copy_channels_osx():
)
with open(__this_dir__ / f"channel_{channel}/osx-64/repodata.json") as f:
repodata = f.read()
with open(
__this_dir__ / f"channel_{channel}/osx-64/repodata.json", "w"
) as f:
with open(__this_dir__ / f"channel_{channel}/osx-64/repodata.json", "w") as f:
repodata = repodata.replace("linux", "osx")
f.write(repodata)
def test_dummy_create(
add_glibc_virtual_package, copy_channels_osx, tmp_home, tmp_root_prefix
):
def test_dummy_create(add_glibc_virtual_package, copy_channels_osx, tmp_home, tmp_root_prefix):
env_name = "myenv"
channels = [
@ -1022,8 +992,7 @@ def test_dummy_create(
res = helpers.create_with_chan_pkg(env_name, channels, package)
assert any(
link["name"] == "b" and "channel_a" in link["channel"]
for link in res["actions"]["LINK"]
link["name"] == "b" and "channel_a" in link["channel"] for link in res["actions"]["LINK"]
)
channels = channels[::-1]
@ -1052,15 +1021,13 @@ def test_create_dry_run(tmp_home, tmp_root_prefix, use_json):
def test_create_with_non_existing_subdir(tmp_home, tmp_root_prefix, tmp_path):
env_prefix = tmp_path / "myprefix"
with pytest.raises(subprocess.CalledProcessError) as e:
helpers.create(
"-p", env_prefix, "--dry-run", "--json", f"conda-forge/noarch::xtensor"
)
with pytest.raises(subprocess.CalledProcessError):
helpers.create("-p", env_prefix, "--dry-run", "--json", "conda-forge/noarch::xtensor")
def test_create_with_multiple_files(tmp_home, tmp_root_prefix, tmpdir):
env_name = "myenv"
env_prefix = tmp_root_prefix / "envs" / env_name
tmp_root_prefix / "envs" / env_name
# Check that multiple --file arguments are considered
(tmpdir / "file_a.txt").write(b"a")
@ -1097,7 +1064,7 @@ multichannel_config = {
def test_create_with_multi_channels(tmp_home, tmp_root_prefix, tmp_path):
env_name = "myenv"
env_prefix = tmp_root_prefix / "envs" / env_name
tmp_root_prefix / "envs" / env_name
rc_file = tmp_path / "config.yaml"
rc_file.write_text(yaml.dump(multichannel_config))
@ -1119,17 +1086,15 @@ def test_create_with_multi_channels(tmp_home, tmp_root_prefix, tmp_path):
assert pkg["url"].startswith("https://conda.anaconda.org/conda-forge/")
def test_create_with_multi_channels_and_non_existing_subdir(
tmp_home, tmp_root_prefix, tmp_path
):
def test_create_with_multi_channels_and_non_existing_subdir(tmp_home, tmp_root_prefix, tmp_path):
env_name = "myenv"
env_prefix = tmp_root_prefix / "envs" / env_name
tmp_root_prefix / "envs" / env_name
rc_file = tmp_path / "config.yaml"
rc_file.write_text(yaml.dump(multichannel_config))
with pytest.raises(subprocess.CalledProcessError) as e:
res = helpers.create(
with pytest.raises(subprocess.CalledProcessError):
helpers.create(
"-n",
env_name,
"conda-forge2/noarch::xtensor",

View File

@ -84,9 +84,7 @@ def test_env_export(export_env, explicit_flag, md5_flag, channel_subdir_flag):
assert set(ret["channels"]) == {"conda-forge"}
assert "micromamba=0.24.0=0" in str(ret["dependencies"])
if md5_flag == "--md5":
assert re.search(
r"micromamba=0.24.0=0\[md5=[a-f0-9]{32}\]", str(ret["dependencies"])
)
assert re.search(r"micromamba=0.24.0=0\[md5=[a-f0-9]{32}\]", str(ret["dependencies"]))
if channel_subdir_flag:
assert re.search(
r"conda-forge/[a-z0-9-]+::micromamba=0.24.0=0", str(ret["dependencies"])
@ -141,17 +139,13 @@ def test_env_update(tmp_home, tmp_root_prefix, tmp_path, prune):
env_name = "env-create-update"
# Create env with python=3.6.15 and xtensor=0.20.0
helpers.create(
"python=3.6.15", "xtensor=0.20.0", "-n", env_name, "--json", no_dry_run=True
)
helpers.create("python=3.6.15", "xtensor=0.20.0", "-n", env_name, "--json", no_dry_run=True)
packages = helpers.umamba_list("-n", env_name, "--json")
assert any(
package["name"] == "python" and package["version"] == "3.6.15"
for package in packages
package["name"] == "python" and package["version"] == "3.6.15" for package in packages
)
assert any(
package["name"] == "xtensor" and package["version"] == "0.20.0"
for package in packages
package["name"] == "xtensor" and package["version"] == "0.20.0" for package in packages
)
assert any(package["name"] == "xtl" for package in packages)
@ -176,8 +170,7 @@ def test_env_update(tmp_home, tmp_root_prefix, tmp_path, prune):
assert not any(package["name"] == "xtl" for package in packages)
else:
assert any(
package["name"] == "xtensor" and package["version"] == "0.20.0"
for package in packages
package["name"] == "xtensor" and package["version"] == "0.20.0" for package in packages
)
assert any(package["name"] == "xtl" for package in packages)
@ -195,9 +188,9 @@ def test_explicit_export_topologically_sorted(tmp_home, tmp_prefix):
"pip": 0,
"jupyterlab": 0,
}
for i, l in enumerate(lines):
for i, line in enumerate(lines):
for pkg in indices.keys():
if pkg in l:
if pkg in line:
indices[pkg] = i
assert indices["libzlib"] < indices["python"]

View File

@ -6,15 +6,17 @@ from pathlib import Path
import pytest
from .helpers import *
# Need to import everything to get fixtures
from .helpers import * # noqa: F403
from . import helpers
class TestInstall:
current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
current_prefix = os.environ["CONDA_PREFIX"]
env_name = random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
env_name = helpers.random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
prefix = os.path.join(root_prefix, "envs", env_name)
@classmethod
@ -24,7 +26,7 @@ class TestInstall:
@classmethod
def setup_method(cls):
create("-n", TestInstall.env_name, "--offline", no_dry_run=True)
helpers.create("-n", TestInstall.env_name, "--offline", no_dry_run=True)
@classmethod
def teardown_class(cls):
@ -41,7 +43,7 @@ class TestInstall:
os.environ.pop(v)
if Path(TestInstall.prefix).exists():
rmtree(TestInstall.prefix)
helpers.rmtree(TestInstall.prefix)
@classmethod
def config_tests(cls, res, root_prefix=root_prefix, target_prefix=prefix):
@ -49,10 +51,10 @@ class TestInstall:
assert res["target_prefix"] == target_prefix
assert res["use_target_prefix_fallback"]
checks = (
MAMBA_ALLOW_EXISTING_PREFIX
| MAMBA_NOT_ALLOW_MISSING_PREFIX
| MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
| MAMBA_EXPECT_EXISTING_PREFIX
helpers.MAMBA_ALLOW_EXISTING_PREFIX
| helpers.MAMBA_NOT_ALLOW_MISSING_PREFIX
| helpers.MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
| helpers.MAMBA_EXPECT_EXISTING_PREFIX
)
assert res["target_prefix_checks"] == checks
@ -77,7 +79,7 @@ class TestInstall:
cmd = list(specs)
if source in ("spec_file_only", "both"):
f_name = random_string()
f_name = helpers.random_string()
spec_file = os.path.join(TestInstall.root_prefix, f_name)
if file_type == "classic":
@ -86,10 +88,8 @@ class TestInstall:
elif file_type == "explicit":
channel = "https://conda.anaconda.org/conda-forge/linux-64/"
explicit_specs = [
channel
+ "xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887",
channel
+ "xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f",
channel + "xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887",
channel + "xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f",
]
file_content = ["@EXPLICIT"] + explicit_specs
specs = explicit_specs
@ -103,7 +103,7 @@ class TestInstall:
cmd += ["-f", spec_file]
res = install(*cmd, "--print-config-only")
res = helpers.install(*cmd, "--print-config-only")
TestInstall.config_tests(res)
assert res["env_name"] == ""
@ -130,9 +130,7 @@ class TestInstall:
cmd = []
if root_prefix in (None, "cli"):
os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop(
"MAMBA_ROOT_PREFIX"
)
os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop("MAMBA_ROOT_PREFIX")
if root_prefix == "cli":
cmd += ["-r", TestInstall.root_prefix]
@ -155,7 +153,7 @@ class TestInstall:
cmd += ["-n", n]
if yaml_name:
f_name = random_string() + ".yaml"
f_name = helpers.random_string() + ".yaml"
spec_file = os.path.join(TestInstall.prefix, f_name)
if yaml_name == "prefix":
@ -188,9 +186,9 @@ class TestInstall:
or not (cli_prefix or cli_env_name or yaml_name or env_var or fallback)
):
with pytest.raises(subprocess.CalledProcessError):
install(*cmd, "--print-config-only")
helpers.install(*cmd, "--print-config-only")
else:
res = install(*cmd, "--print-config-only")
res = helpers.install(*cmd, "--print-config-only")
TestInstall.config_tests(res, root_prefix=r, target_prefix=expected_p)
@pytest.mark.parametrize("cli", (False, True))
@ -206,7 +204,7 @@ class TestInstall:
expected_channels += ["cli"]
if yaml:
f_name = random_string() + ".yaml"
f_name = helpers.random_string() + ".yaml"
spec_file = os.path.join(TestInstall.prefix, f_name)
file_content = [
@ -224,7 +222,7 @@ class TestInstall:
expected_channels += ["env_var"]
if rc_file:
f_name = random_string() + ".yaml"
f_name = helpers.random_string() + ".yaml"
rc_file = os.path.join(TestInstall.prefix, f_name)
file_content = ["channels: [rc]"]
@ -234,9 +232,7 @@ class TestInstall:
cmd += ["--rc-file", rc_file]
expected_channels += ["rc"]
res = install(
*cmd, "--print-config-only", no_rc=not rc_file, default_channel=False
)
res = helpers.install(*cmd, "--print-config-only", no_rc=not rc_file, default_channel=False)
TestInstall.config_tests(res)
if expected_channels:
assert res["channels"] == expected_channels
@ -249,14 +245,12 @@ class TestInstall:
specs = ["xtensor", "xsimd"]
channel = "https://conda.anaconda.org/conda-forge/linux-64/"
explicit_specs = [
channel
+ "xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887",
channel
+ "linux-64/xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f",
channel + "xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887",
channel + "linux-64/xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f",
]
for i in range(2):
f_name = random_string()
f_name = helpers.random_string()
file = os.path.join(TestInstall.prefix, f_name)
if type == "yaml":
@ -264,7 +258,6 @@ class TestInstall:
file_content = [f"dependencies: [{specs[i]}]"]
elif type == "classic":
file_content = [specs[i]]
expected_specs = specs
else: # explicit
file_content = ["@EXPLICIT", explicit_specs[i]]
@ -275,9 +268,9 @@ class TestInstall:
if type == "yaml":
with pytest.raises(subprocess.CalledProcessError):
install(*cmd, "--print-config-only")
helpers.install(*cmd, "--print-config-only")
else:
res = install(*cmd, "--print-config-only")
res = helpers.install(*cmd, "--print-config-only")
if type == "classic":
assert res["specs"] == specs
else: # explicit
@ -286,9 +279,7 @@ class TestInstall:
@pytest.mark.parametrize("priority", (None, "disabled", "flexible", "strict"))
@pytest.mark.parametrize("no_priority", (None, True))
@pytest.mark.parametrize("strict_priority", (None, True))
def test_channel_priority(
self, priority, no_priority, strict_priority, existing_cache
):
def test_channel_priority(self, priority, no_priority, strict_priority, existing_cache):
cmd = ["-p", TestInstall.prefix, "xtensor"]
expected_priority = "flexible"
@ -311,14 +302,14 @@ class TestInstall:
or (no_priority and strict_priority)
):
with pytest.raises(subprocess.CalledProcessError):
install(*cmd, "--print-config-only")
helpers.install(*cmd, "--print-config-only")
else:
res = install(*cmd, "--print-config-only")
res = helpers.install(*cmd, "--print-config-only")
assert res["channel_priority"] == expected_priority
def test_quotes(self, existing_cache):
cmd = ["-p", f"{TestInstall.prefix}", "xtensor", "--print-config-only"]
res = install(*cmd)
res = helpers.install(*cmd)
assert res["target_prefix"] == TestInstall.prefix
@pytest.mark.parametrize("prefix", ("target", "root"))
@ -338,27 +329,28 @@ class TestInstall:
"xtensor",
"--print-config-only",
]
res = install(*cmd)
res = helpers.install(*cmd)
assert res["target_prefix"] == TestInstall.prefix
assert res["root_prefix"] == TestInstall.root_prefix
def test_empty_specs(self, existing_cache):
assert "Nothing to do." in install().strip()
assert "Nothing to do." in helpers.install().strip()
@pytest.mark.skipif(
dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
reason="Running only ultra-dry tests",
)
@pytest.mark.parametrize("already_installed", [False, True])
def test_non_explicit_spec(self, already_installed, existing_cache):
cmd = ["-p", TestInstall.prefix, "xtensor", "--json"]
if already_installed:
install(*cmd, no_dry_run=True)
helpers.install(*cmd, no_dry_run=True)
res = install(*cmd)
res = helpers.install(*cmd)
assert res["success"]
assert res["dry_run"] == (dry_run_tests == DryRun.DRY)
assert res["dry_run"] == (helpers.dry_run_tests == helpers.DryRun.DRY)
if already_installed:
keys = {"dry_run", "success", "prefix", "message"}
assert keys.issubset(set(res.keys()))
@ -373,15 +365,16 @@ class TestInstall:
expected_packages = {"xtensor", "xtl"}
assert expected_packages.issubset(packages)
if not dry_run_tests:
pkg_name = get_concrete_pkg(res, "xtensor")
orig_file_path = get_pkg(
pkg_name, xtensor_hpp, TestInstall.current_root_prefix
if not helpers.dry_run_tests:
pkg_name = helpers.get_concrete_pkg(res, "xtensor")
orig_file_path = helpers.get_pkg(
pkg_name, helpers.xtensor_hpp, TestInstall.current_root_prefix
)
assert orig_file_path.exists()
@pytest.mark.skipif(
dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
reason="Running only ultra-dry tests",
)
@pytest.mark.parametrize("already_installed", [False, True])
@pytest.mark.parametrize("valid", [False, True])
@ -400,9 +393,9 @@ class TestInstall:
cmd = ("-p", TestInstall.prefix, "-q", "-f", spec_file)
if valid:
install(*cmd, default_channel=False)
helpers.install(*cmd, default_channel=False)
list_res = umamba_list("-p", TestInstall.prefix, "--json")
list_res = helpers.umamba_list("-p", TestInstall.prefix, "--json")
assert len(list_res) == 1
pkg = list_res[0]
assert pkg["name"] == "xtensor"
@ -410,10 +403,11 @@ class TestInstall:
assert pkg["build_string"] == "hc9558a2_0"
else:
with pytest.raises(subprocess.CalledProcessError):
install(*cmd, default_channel=False)
helpers.install(*cmd, default_channel=False)
@pytest.mark.skipif(
dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
reason="Running only ultra-dry tests",
)
@pytest.mark.parametrize(
"alias",
@ -426,22 +420,23 @@ class TestInstall:
)
def test_channel_alias(self, alias, existing_cache):
if alias:
res = install("xtensor", "--json", "--channel-alias", alias)
res = helpers.install("xtensor", "--json", "--channel-alias", alias)
ca = alias.rstrip("/")
else:
res = install("xtensor", "--json")
res = helpers.install("xtensor", "--json")
ca = "https://conda.anaconda.org"
for l in res["actions"]["LINK"]:
assert l["channel"].startswith(f"{ca}/conda-forge/")
assert l["url"].startswith(f"{ca}/conda-forge/")
for to_link in res["actions"]["LINK"]:
assert to_link["channel"].startswith(f"{ca}/conda-forge/")
assert to_link["url"].startswith(f"{ca}/conda-forge/")
@pytest.mark.skipif(
dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
reason="Running only ultra-dry tests",
)
def test_no_python_pinning(self, existing_cache):
install("python=3.9", no_dry_run=True)
res = install("setuptools=28.4.0", "--no-py-pin", "--json")
helpers.install("python=3.9", no_dry_run=True)
res = helpers.install("setuptools=28.4.0", "--no-py-pin", "--json")
keys = {"success", "prefix", "actions", "dry_run"}
assert keys.issubset(set(res.keys()))
@ -449,9 +444,7 @@ class TestInstall:
action_keys = {"LINK", "UNLINK", "PREFIX"}
assert action_keys.issubset(set(res["actions"].keys()))
expected_link_packages = (
{"python"} if os.name == "nt" else {"python", "python_abi"}
)
expected_link_packages = {"python"} if os.name == "nt" else {"python", "python_abi"}
link_packages = {pkg["name"] for pkg in res["actions"]["LINK"]}
assert expected_link_packages.issubset(link_packages)
unlink_packages = {pkg["name"] for pkg in res["actions"]["UNLINK"]}
@ -464,26 +457,28 @@ class TestInstall:
assert py_pkg["version"].startswith("3.9")
@pytest.mark.skipif(
dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
reason="Running only ultra-dry tests",
)
@pytest.mark.skipif(sys.platform == "win32", reason="Python2 no available")
def test_python_pinning(self, existing_cache):
"""Black fails to install as it is not available for pinned Python 2."""
res = install("python=2", "--json", no_dry_run=True)
res = helpers.install("python=2", "--json", no_dry_run=True)
assert res["success"]
# We do not have great way to check for the type of error for now
try:
install("black", "--py-pin", "--json")
helpers.install("black", "--py-pin", "--json")
assert False
except subprocess.CalledProcessError:
pass
@pytest.mark.skipif(
dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
reason="Running only ultra-dry tests",
)
def test_freeze_installed(self, existing_cache):
install("xtensor=0.20", no_dry_run=True)
res = install("xframe", "--freeze-installed", "--json")
helpers.install("xtensor=0.20", no_dry_run=True)
res = helpers.install("xframe", "--freeze-installed", "--json")
# without freeze installed, xframe 0.3.0 should be installed and xtensor updated to 0.21
keys = {"success", "prefix", "actions", "dry_run"}
@ -498,9 +493,7 @@ class TestInstall:
assert res["actions"]["LINK"][0]["version"] == "0.2.0"
def test_channel_specific(self, existing_cache):
res = install(
"conda-forge::xtensor", "--json", default_channel=False, no_rc=True
)
res = helpers.install("conda-forge::xtensor", "--json", default_channel=False, no_rc=True)
keys = {"success", "prefix", "actions", "dry_run"}
assert keys.issubset(set(res.keys()))
@ -516,12 +509,11 @@ class TestInstall:
assert pkg["channel"].startswith("https://conda.anaconda.org/conda-forge/")
def test_explicit_noarch(self, existing_cache):
install("python", no_dry_run=True)
helpers.install("python", no_dry_run=True)
channel = "https://conda.anaconda.org/conda-forge/noarch/"
explicit_spec = (
channel
+ "appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b"
channel + "appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b"
)
file_content = ["@EXPLICIT", explicit_spec]
@ -531,9 +523,9 @@ class TestInstall:
cmd = ("-p", TestInstall.prefix, "-q", "-f", spec_file)
install(*cmd, default_channel=False)
helpers.install(*cmd, default_channel=False)
list_res = umamba_list("-p", TestInstall.prefix, "--json")
list_res = helpers.umamba_list("-p", TestInstall.prefix, "--json")
pkgs = [p for p in list_res if p["name"] == "appdirs"]
assert len(pkgs) == 1
pkg = pkgs[0]
@ -541,33 +533,31 @@ class TestInstall:
assert pkg["build_string"] == "pyh9f0ad1d_0"
def test_broken_package_name(self):
non_existing_url = (
"https://026e9ab9-6b46-4285-ae0d-427553801720.de/mypackage.tar.bz2"
)
non_existing_url = "https://026e9ab9-6b46-4285-ae0d-427553801720.de/mypackage.tar.bz2"
try:
res = install(non_existing_url, default_channel=False)
helpers.install(non_existing_url, default_channel=False)
except subprocess.CalledProcessError as e:
assert "Invalid package filename" in e.stderr.decode("utf-8")
def test_no_reinstall(self, existing_cache):
"""Reinstalling is a no op."""
res = install("xtensor", "--json")
res = helpers.install("xtensor", "--json")
assert "xtensor" in {pkg["name"] for pkg in res["actions"]["LINK"]}
reinstall_res = install("xtensor", "--json")
reinstall_res = helpers.install("xtensor", "--json")
assert "actions" not in reinstall_res
def test_force_reinstall(self, existing_cache):
"""Force reinstall installs existing package again."""
res = install("xtensor", "--json")
res = helpers.install("xtensor", "--json")
assert "xtensor" in {pkg["name"] for pkg in res["actions"]["LINK"]}
reinstall_res = install("xtensor", "--force-reinstall", "--json")
reinstall_res = helpers.install("xtensor", "--force-reinstall", "--json")
assert "xtensor" in {pkg["name"] for pkg in reinstall_res["actions"]["LINK"]}
def test_force_reinstall_not_installed(self, existing_cache):
"""Force reinstall on non-installed packages is valid."""
reinstall_res = install("xtensor", "--force-reinstall", "--json")
reinstall_res = helpers.install("xtensor", "--force-reinstall", "--json")
assert "xtensor" in {pkg["name"] for pkg in reinstall_res["actions"]["LINK"]}
@ -575,13 +565,13 @@ def test_install_check_dirs(tmp_home, tmp_root_prefix):
env_name = "myenv"
env_prefix = tmp_root_prefix / "envs" / env_name
create("-n", env_name, "python=3.8")
res = install("-n", env_name, "nodejs", "--json")
helpers.create("-n", env_name, "python=3.8")
res = helpers.install("-n", env_name, "nodejs", "--json")
assert os.path.isdir(env_prefix)
assert "nodejs" in {pkg["name"] for pkg in res["actions"]["LINK"]}
if platform.system() == "Windows":
if helpers.platform.system() == "Windows":
assert os.path.isdir(env_prefix / "lib" / "site-packages")
else:
assert os.path.isdir(env_prefix / "lib" / "python3.8" / "site-packages")
@ -589,12 +579,12 @@ def test_install_check_dirs(tmp_home, tmp_root_prefix):
def test_track_features(tmp_home, tmp_root_prefix):
env_name = "myenv"
env_prefix = tmp_root_prefix / "envs" / env_name
tmp_root_prefix / "envs" / env_name
# should install CPython since PyPy has track features
version = "3.7.9"
create("-n", env_name, default_channel=False, no_rc=False)
install(
helpers.create("-n", env_name, default_channel=False, no_rc=False)
helpers.install(
"-n",
env_name,
"-q",
@ -602,20 +592,20 @@ def test_track_features(tmp_home, tmp_root_prefix):
"--strict-channel-priority",
no_rc=False,
)
res = umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
if platform.system() == "Windows":
res = helpers.umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
if helpers.platform.system() == "Windows":
assert res.strip().startswith(version)
assert "[MSC v." in res.strip()
elif platform.system() == "Linux":
elif helpers.platform.system() == "Linux":
assert res.strip().startswith(version)
assert "[GCC" in res.strip()
else:
assert res.strip().startswith(version)
assert "[Clang" in res.strip()
if platform.system() == "Linux":
if helpers.platform.system() == "Linux":
# now force PyPy install
install(
helpers.install(
"-n",
env_name,
"-q",
@ -623,9 +613,7 @@ def test_track_features(tmp_home, tmp_root_prefix):
"--strict-channel-priority",
no_rc=False,
)
res = umamba_run(
"-n", env_name, "python", "-c", "import sys; print(sys.version)"
)
res = helpers.umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
assert res.strip().startswith(version)
assert "[PyPy" in res.strip()
@ -633,11 +621,11 @@ def test_track_features(tmp_home, tmp_root_prefix):
def test_reinstall_with_new_version(tmp_home, tmp_root_prefix):
env_name = "myenv"
env_prefix = tmp_root_prefix / "envs" / env_name
tmp_root_prefix / "envs" / env_name
version = "3.8"
create("-n", env_name, default_channel=False, no_rc=False)
install(
helpers.create("-n", env_name, default_channel=False, no_rc=False)
helpers.install(
"-n",
env_name,
"-q",
@ -646,17 +634,15 @@ def test_reinstall_with_new_version(tmp_home, tmp_root_prefix):
no_rc=False,
)
res = umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
res = helpers.umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
assert version in res
res = umamba_run(
"-n", env_name, "python", "-c", "import pip; print(pip.__version__)"
)
res = helpers.umamba_run("-n", env_name, "python", "-c", "import pip; print(pip.__version__)")
assert len(res)
# Update python version
version = "3.9"
install(
helpers.install(
"-n",
env_name,
"-q",
@ -664,10 +650,8 @@ def test_reinstall_with_new_version(tmp_home, tmp_root_prefix):
no_rc=False,
)
res = umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
res = helpers.umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
assert version in res
res = umamba_run(
"-n", env_name, "python", "-c", "import pip; print(pip.__version__)"
)
res = helpers.umamba_run("-n", env_name, "python", "-c", "import pip; print(pip.__version__)")
assert len(res)

View File

@ -1,15 +1,12 @@
import json
import os
import platform
import random
import shutil
import string
import subprocess
from pathlib import Path
import pytest
from .helpers import *
# Need to import everything to get fixtures
from .helpers import * # noqa: F403
from . import helpers
if platform.system() == "Windows":
xtensor_hpp = "Library/include/xtensor/xtensor.hpp"
@ -21,8 +18,8 @@ class TestLinking:
current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
current_prefix = os.environ["CONDA_PREFIX"]
env_name = random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
env_name = helpers.random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
prefix = os.path.join(root_prefix, "envs", env_name)
@classmethod
@ -35,17 +32,17 @@ class TestLinking:
os.environ["CONDA_PREFIX"] = TestLinking.current_prefix
if Path(TestLinking.root_prefix).exists():
rmtree(TestLinking.root_prefix)
helpers.rmtree(TestLinking.root_prefix)
@classmethod
def teardown_method(cls):
if Path(TestLinking.prefix).exists():
rmtree(TestLinking.prefix)
helpers.rmtree(TestLinking.prefix)
def test_link(self, existing_cache, test_pkg):
create("xtensor", "-n", TestLinking.env_name, "--json", no_dry_run=True)
helpers.create("xtensor", "-n", TestLinking.env_name, "--json", no_dry_run=True)
linked_file = get_env(TestLinking.env_name, xtensor_hpp)
linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
assert linked_file.exists()
assert not linked_file.is_symlink()
@ -54,7 +51,7 @@ class TestLinking:
assert cache_file.stat().st_ino == linked_file.stat().st_ino
def test_copy(self, existing_cache, test_pkg):
create(
helpers.create(
"xtensor",
"-n",
TestLinking.env_name,
@ -62,7 +59,7 @@ class TestLinking:
"--always-copy",
no_dry_run=True,
)
linked_file = get_env(TestLinking.env_name, xtensor_hpp)
linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
assert linked_file.exists()
assert not linked_file.is_symlink()
@ -75,7 +72,7 @@ class TestLinking:
reason="Softlinking needs admin privileges on win",
)
def test_always_softlink(self, existing_cache, test_pkg):
create(
helpers.create(
"xtensor",
"-n",
TestLinking.env_name,
@ -83,7 +80,7 @@ class TestLinking:
"--always-softlink",
no_dry_run=True,
)
linked_file = get_env(TestLinking.env_name, xtensor_hpp)
linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
assert linked_file.exists()
assert linked_file.is_symlink()
@ -105,15 +102,13 @@ class TestLinking:
create_args.append("--allow-softlinks")
if always_copy:
create_args.append("--always-copy")
create(*create_args, no_dry_run=True)
helpers.create(*create_args, no_dry_run=True)
same_device = (
existing_cache.stat().st_dev == Path(TestLinking.prefix).stat().st_dev
)
same_device = existing_cache.stat().st_dev == Path(TestLinking.prefix).stat().st_dev
is_softlink = not same_device and allow_softlinks and not always_copy
is_hardlink = same_device and not always_copy
linked_file = get_env(TestLinking.env_name, xtensor_hpp)
linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
assert linked_file.exists()
cache_file = existing_cache / test_pkg / xtensor_hpp
@ -122,16 +117,14 @@ class TestLinking:
assert linked_file.is_symlink() == is_softlink
def test_unlink_missing_file(self):
create("xtensor", "-n", TestLinking.env_name, "--json", no_dry_run=True)
helpers.create("xtensor", "-n", TestLinking.env_name, "--json", no_dry_run=True)
linked_file = get_env(TestLinking.env_name, xtensor_hpp)
linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
assert linked_file.exists()
assert not linked_file.is_symlink()
os.remove(linked_file)
remove("xtensor", "-n", TestLinking.env_name)
helpers.remove("xtensor", "-n", TestLinking.env_name)
def test_link_missing_scripts_dir(self): # issue 2808
create(
"python=3.7", "pypy", "-n", TestLinking.env_name, "--json", no_dry_run=True
)
helpers.create("python=3.7", "pypy", "-n", TestLinking.env_name, "--json", no_dry_run=True)

View File

@ -8,9 +8,7 @@ from . import helpers
@pytest.mark.parametrize("quiet_flag", ["", "-q", "--quiet"])
@pytest.mark.parametrize("env_selector", ["", "name", "prefix"])
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
def test_list(
tmp_home, tmp_root_prefix, tmp_env_name, tmp_xtensor_env, env_selector, quiet_flag
):
def test_list(tmp_home, tmp_root_prefix, tmp_env_name, tmp_xtensor_env, env_selector, quiet_flag):
if env_selector == "prefix":
res = helpers.umamba_list("-p", tmp_xtensor_env, "--json", quiet_flag)
elif env_selector == "name":

View File

@ -209,9 +209,7 @@ env_file_content = """
@pytest.mark.parametrize("user,password", [["testuser", "xyzpass"]])
def test_basic_auth_explicit_txt(
auth_file, user, password, basic_auth_server, tmp_path
):
def test_basic_auth_explicit_txt(auth_file, user, password, basic_auth_server, tmp_path):
login(basic_auth_server, "--username", user, "--password", password)
env_file = tmp_path / "environment.txt"
@ -224,9 +222,7 @@ def test_basic_auth_explicit_txt(
@pytest.mark.parametrize("user,password", [["testuser", "xyzpass"]])
def test_basic_auth_explicit_yaml(
auth_file, user, password, basic_auth_server, tmp_path
):
def test_basic_auth_explicit_yaml(auth_file, user, password, basic_auth_server, tmp_path):
login(basic_auth_server, "--username", user, "--password", password)
env_file = tmp_path / "environment.yml"

View File

@ -5,7 +5,7 @@ from pathlib import Path
import pytest
from .helpers import create, get_env, get_umamba, random_string, remove, umamba_list
from .helpers import create, random_string, remove
if sys.platform.startswith("win"):
import menuinst
@ -44,9 +44,7 @@ class TestMenuinst:
assert shortcut.TargetPath.lower() == os.getenv("COMSPEC").lower()
icon_location = shortcut.IconLocation
icon_location_path, icon_location_index = icon_location.split(",")
assert Path(icon_location_path) == (
Path(prefix) / "Menu" / "console_shortcut.ico"
)
assert Path(icon_location_path) == (Path(prefix) / "Menu" / "console_shortcut.ico")
assert icon_location_index == "0"
assert shortcut.Description == "Miniforge Prompt (" + env_name + ")"
@ -82,9 +80,7 @@ class TestMenuinst:
icon_location = shortcut.IconLocation
icon_location_path, icon_location_index = icon_location.split(",")
assert Path(icon_location_path) == (
Path(prefix) / "Menu" / "console_shortcut.ico"
)
assert Path(icon_location_path) == (Path(prefix) / "Menu" / "console_shortcut.ico")
assert icon_location_index == "0"
assert shortcut.Description == "Miniforge Prompt (" + env_name + ")"

View File

@ -10,7 +10,7 @@ import pytest
import zstandard
from conda_package_handling import api as cph
from .helpers import *
from . import helpers
@pytest.fixture
@ -37,7 +37,7 @@ def test_extract(cph_test_file: Path, tmp_path: Path):
shutil.copy(cph_test_file, tmp_path / "mm")
shutil.copy(cph_test_file, tmp_path / "cph")
mamba_exe = get_umamba()
mamba_exe = helpers.get_umamba()
subprocess.call(
[
mamba_exe,
@ -52,21 +52,13 @@ def test_extract(cph_test_file: Path, tmp_path: Path):
dest_dir=str(tmp_path / "cph" / "cph_test_data-0.0.1-0"),
)
conda = set(
(p.relative_to(tmp_path / "cph") for p in (tmp_path / "cph").rglob("**/*"))
)
mamba = set(
(p.relative_to(tmp_path / "mm") for p in (tmp_path / "mm").rglob("**/*"))
)
conda = set((p.relative_to(tmp_path / "cph") for p in (tmp_path / "cph").rglob("**/*")))
mamba = set((p.relative_to(tmp_path / "mm") for p in (tmp_path / "mm").rglob("**/*")))
assert conda == mamba
extracted = cph_test_file.name.removesuffix(".tar.bz2")
fcmp = filecmp.dircmp(tmp_path / "cph" / extracted, tmp_path / "mm" / extracted)
assert (
len(fcmp.left_only) == 0
and len(fcmp.right_only) == 0
and len(fcmp.diff_files) == 0
)
assert len(fcmp.left_only) == 0 and len(fcmp.right_only) == 0 and len(fcmp.diff_files) == 0
# fcmp.report_full_closure()
@ -101,8 +93,8 @@ def compare_two_tarfiles(tar1, tar2):
assert m1.linkname == m2.linkname
def assert_sorted(l):
assert l == sorted(l)
def assert_sorted(seq):
assert seq == sorted(seq)
def test_extract_compress(cph_test_file: Path, tmp_path: Path):
@ -110,7 +102,7 @@ def test_extract_compress(cph_test_file: Path, tmp_path: Path):
shutil.copy(cph_test_file, tmp_path / "mm")
mamba_exe = get_umamba()
mamba_exe = helpers.get_umamba()
out = tmp_path / "mm" / "out"
subprocess.call(
[
@ -131,9 +123,7 @@ def test_extract_compress(cph_test_file: Path, tmp_path: Path):
]
)
compare_two_tarfiles(
tarfile.open(cph_test_file), tarfile.open(tmp_path / "mm" / "out.tar.bz2")
)
compare_two_tarfiles(tarfile.open(cph_test_file), tarfile.open(tmp_path / "mm" / "out.tar.bz2"))
fout = tarfile.open(tmp_path / "mm" / "out.tar.bz2")
names = fout.getnames()
@ -155,10 +145,8 @@ def test_transmute(cph_test_file: Path, tmp_path: Path):
shutil.copy(cph_test_file, tmp_path)
shutil.copy(tmp_path / cph_test_file.name, tmp_path / "mm")
mamba_exe = get_umamba()
subprocess.call(
[mamba_exe, "package", "transmute", str(tmp_path / "mm" / cph_test_file.name)]
)
mamba_exe = helpers.get_umamba()
subprocess.call([mamba_exe, "package", "transmute", str(tmp_path / "mm" / cph_test_file.name)])
failed_files = cph.transmute(
str(tmp_path / cph_test_file.name), ".conda", out_folder=str(tmp_path / "cph")
)
@ -169,27 +157,23 @@ def test_transmute(cph_test_file: Path, tmp_path: Path):
cph.extract(str(tmp_path / "cph" / as_conda))
cph.extract(str(tmp_path / "mm" / as_conda))
conda = list((tmp_path / "cph").rglob("**/*"))
mamba = list((tmp_path / "mm").rglob("**/*"))
list((tmp_path / "cph").rglob("**/*"))
list((tmp_path / "mm").rglob("**/*"))
fcmp = filecmp.dircmp(
tmp_path / "cph" / "cph_test_data-0.0.1-0",
tmp_path / "mm" / "cph_test_data-0.0.1-0",
)
assert (
len(fcmp.left_only) == 0
and len(fcmp.right_only) == 0
and len(fcmp.diff_files) == 0
)
assert len(fcmp.left_only) == 0 and len(fcmp.right_only) == 0 and len(fcmp.diff_files) == 0
# fcmp.report_full_closure()
# extract zipfile
with zipfile.ZipFile(tmp_path / "mm" / as_conda, "r") as zip_ref:
l = zip_ref.namelist()
names = zip_ref.namelist()
assert l[2].startswith("info-")
assert l[0] == "metadata.json"
assert l[1].startswith("pkg-")
assert names[2].startswith("info-")
assert names[0] == "metadata.json"
assert names[1].startswith("pkg-")
zip_ref.extractall(tmp_path / "mm" / "zipcontents")

View File

@ -85,9 +85,7 @@ def tmp_cache_xtensor_hpp(tmp_cache_xtensor_dir: Path) -> Path:
class TestPkgCache:
def test_extracted_file_deleted(
self, tmp_home, tmp_cache_xtensor_hpp, tmp_root_prefix
):
def test_extracted_file_deleted(self, tmp_home, tmp_cache_xtensor_hpp, tmp_root_prefix):
old_ino = tmp_cache_xtensor_hpp.stat().st_ino
os.remove(tmp_cache_xtensor_hpp)
@ -232,17 +230,13 @@ class TestPkgCache:
@pytest.fixture
def tmp_cache_alt(tmp_root_prefix: Path, tmp_shared_cache_xtensor: Path) -> Path:
"""Make an alternative package cache outside the root prefix."""
cache = (
tmp_root_prefix / "more-pkgs"
) # Creating under root prefix to leverage eager cleanup
cache = tmp_root_prefix / "more-pkgs" # Creating under root prefix to leverage eager cleanup
shutil.copytree(tmp_shared_cache_xtensor, cache, dirs_exist_ok=True)
return cache
def repodata_json(cache: Path) -> set[Path]:
return set((cache / "cache").glob("*.json")) - set(
(cache / "cache").glob("*.state.json")
)
return set((cache / "cache").glob("*.json")) - set((cache / "cache").glob("*.state.json"))
def repodata_solv(cache: Path) -> set[Path]:
@ -250,15 +244,11 @@ def repodata_solv(cache: Path) -> set[Path]:
def same_repodata_json_solv(cache: Path):
return {p.stem for p in repodata_json(cache)} == {
p.stem for p in repodata_solv(cache)
}
return {p.stem for p in repodata_json(cache)} == {p.stem for p in repodata_solv(cache)}
class TestMultiplePkgCaches:
@pytest.mark.parametrize(
"cache", (pytest.lazy_fixture(("tmp_cache", "tmp_cache_alt")))
)
@pytest.mark.parametrize("cache", (pytest.lazy_fixture(("tmp_cache", "tmp_cache_alt"))))
def test_different_caches(self, tmp_home, tmp_root_prefix, cache):
os.environ["CONDA_PKGS_DIRS"] = f"{cache}"
env_name = "some_env"
@ -312,12 +302,8 @@ class TestMultiplePkgCaches:
helpers.create("-n", "myenv", "xtensor", "--json", no_dry_run=True)
def test_no_writable_extracted_dir_corrupted(
self, tmp_home, tmp_root_prefix, tmp_cache
):
(
tmp_cache / find_pkg_build(tmp_cache, "xtensor") / helpers.xtensor_hpp
).unlink()
def test_no_writable_extracted_dir_corrupted(self, tmp_home, tmp_root_prefix, tmp_cache):
(tmp_cache / find_pkg_build(tmp_cache, "xtensor") / helpers.xtensor_hpp).unlink()
helpers.recursive_chmod(tmp_cache, 0o500)
os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache}"

View File

@ -1,28 +1,29 @@
import json
import os
import platform
import random
import shutil
import string
import subprocess
import time
from pathlib import Path
import pytest
from .helpers import *
# Need to import everything to get fixtures
from .helpers import * # noqa: F403
from . import helpers
__this_dir__ = Path(__file__).parent.resolve()
@pytest.mark.skipif(dry_run_tests == DryRun.ULTRA_DRY, reason="Running ultra dry tests")
@pytest.mark.skipif(
helpers.dry_run_tests == helpers.DryRun.ULTRA_DRY, reason="Running ultra dry tests"
)
class TestRemove:
current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
current_prefix = os.environ["CONDA_PREFIX"]
cache = os.path.join(current_root_prefix, "pkgs")
env_name = random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
env_name = helpers.random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
prefix = os.path.join(root_prefix, "envs", env_name)
@staticmethod
@ -30,8 +31,8 @@ class TestRemove:
def root(existing_cache):
os.environ["MAMBA_ROOT_PREFIX"] = TestRemove.root_prefix
os.environ["CONDA_PREFIX"] = TestRemove.prefix
create("-n", "base", no_dry_run=True)
create("xtensor", "-n", TestRemove.env_name, no_dry_run=True)
helpers.create("-n", "base", no_dry_run=True)
helpers.create("xtensor", "-n", TestRemove.env_name, no_dry_run=True)
yield
@ -42,19 +43,19 @@ class TestRemove:
@staticmethod
@pytest.fixture
def env_created(root):
if dry_run_tests == DryRun.OFF:
install("xtensor", "-n", TestRemove.env_name)
if helpers.dry_run_tests == helpers.DryRun.OFF:
helpers.install("xtensor", "-n", TestRemove.env_name)
@pytest.mark.parametrize("env_selector", ["", "name", "prefix"])
def test_remove(self, env_selector, env_created):
env_pkgs = [p["name"] for p in umamba_list("-p", TestRemove.prefix, "--json")]
env_pkgs = [p["name"] for p in helpers.umamba_list("-p", TestRemove.prefix, "--json")]
if env_selector == "prefix":
res = remove("xtensor", "-p", TestRemove.prefix, "--json")
res = helpers.remove("xtensor", "-p", TestRemove.prefix, "--json")
elif env_selector == "name":
res = remove("xtensor", "-n", TestRemove.env_name, "--json")
res = helpers.remove("xtensor", "-n", TestRemove.env_name, "--json")
else:
res = remove("xtensor", "--dry-run", "--json")
res = helpers.remove("xtensor", "--dry-run", "--json")
keys = {"dry_run", "success", "prefix", "actions"}
assert keys.issubset(set(res.keys()))
@ -65,10 +66,10 @@ class TestRemove:
assert res["actions"]["PREFIX"] == TestRemove.prefix
def test_remove_orphaned(self, env_created):
env_pkgs = [p["name"] for p in umamba_list("-p", TestRemove.prefix, "--json")]
install("xframe", "-n", TestRemove.env_name, no_dry_run=True)
env_pkgs = [p["name"] for p in helpers.umamba_list("-p", TestRemove.prefix, "--json")]
helpers.install("xframe", "-n", TestRemove.env_name, no_dry_run=True)
res = remove("xframe", "-p", TestRemove.prefix, "--json")
res = helpers.remove("xframe", "-p", TestRemove.prefix, "--json")
keys = {"dry_run", "success", "prefix", "actions"}
assert keys.issubset(set(res.keys()))
@ -77,13 +78,13 @@ class TestRemove:
assert res["actions"]["UNLINK"][0]["name"] == "xframe"
assert res["actions"]["PREFIX"] == TestRemove.prefix
res = remove("xtensor", "-p", TestRemove.prefix, "--json")
res = helpers.remove("xtensor", "-p", TestRemove.prefix, "--json")
keys = {"dry_run", "success", "prefix", "actions"}
assert keys.issubset(set(res.keys()))
assert res["success"]
assert len(res["actions"]["UNLINK"]) == len(env_pkgs) + (
1 if dry_run_tests == DryRun.DRY else 0
1 if helpers.dry_run_tests == helpers.DryRun.DRY else 0
)
for p in res["actions"]["UNLINK"]:
assert p["name"] in env_pkgs
@ -92,10 +93,9 @@ class TestRemove:
def test_remove_force(self, env_created):
# check that we can remove a package without solving the environment (putting
# it in a bad state, actually)
env_pkgs = [p["name"] for p in umamba_list("-p", TestRemove.prefix, "--json")]
install("xframe", "-n", TestRemove.env_name, no_dry_run=True)
helpers.install("xframe", "-n", TestRemove.env_name, no_dry_run=True)
res = remove("xtl", "-p", TestRemove.prefix, "--json", "--force")
res = helpers.remove("xtl", "-p", TestRemove.prefix, "--json", "--force")
keys = {"dry_run", "success", "prefix", "actions"}
assert keys.issubset(set(res.keys()))
@ -105,10 +105,9 @@ class TestRemove:
assert res["actions"]["PREFIX"] == TestRemove.prefix
def test_remove_no_prune_deps(self, env_created):
env_pkgs = [p["name"] for p in umamba_list("-p", TestRemove.prefix, "--json")]
install("xframe", "-n", TestRemove.env_name, no_dry_run=True)
helpers.install("xframe", "-n", TestRemove.env_name, no_dry_run=True)
res = remove("xtensor", "-p", TestRemove.prefix, "--json", "--no-prune-deps")
res = helpers.remove("xtensor", "-p", TestRemove.prefix, "--json", "--no-prune-deps")
keys = {"dry_run", "success", "prefix", "actions"}
assert keys.issubset(set(res.keys()))
@ -120,24 +119,24 @@ class TestRemove:
assert res["actions"]["PREFIX"] == TestRemove.prefix
def test_remove_in_use(self, env_created):
install("python=3.9", "-n", self.env_name, "--json", no_dry_run=True)
helpers.install("python=3.9", "-n", self.env_name, "--json", no_dry_run=True)
if platform.system() == "Windows":
pyexe = Path(self.prefix) / "python.exe"
else:
pyexe = Path(self.prefix) / "bin" / "python"
env = get_fake_activate(self.prefix)
env = helpers.get_fake_activate(self.prefix)
pyproc = subprocess.Popen(
pyexe, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env
)
time.sleep(1)
res = remove("python", "-v", "-p", self.prefix, no_dry_run=True)
helpers.remove("python", "-v", "-p", self.prefix, no_dry_run=True)
if platform.system() == "Windows":
pyexe_trash = Path(str(pyexe) + ".mamba_trash")
assert pyexe.exists() == False
assert pyexe.exists() is False
pyexe_trash_exists = pyexe_trash.exists()
trash_file = Path(self.prefix) / "conda-meta" / "mamba_trash.txt"
@ -148,16 +147,16 @@ class TestRemove:
with open(trash_file, "r") as fi:
lines = [x.strip() for x in fi.readlines()]
assert all([l.endswith(".mamba_trash") for l in lines])
assert all([line.endswith(".mamba_trash") for line in lines])
assert len(all_trash_files) == len(lines)
linesp = [Path(self.prefix) / l for l in lines]
linesp = [Path(self.prefix) / line for line in lines]
for atf in all_trash_files:
assert atf in linesp
else:
assert trash_file.exists() == False
assert pyexe_trash.exists() == False
assert trash_file.exists() is False
assert pyexe_trash.exists() is False
# No change if file still in use
install("cpp-filesystem", "-n", self.env_name, "--json", no_dry_run=True)
helpers.install("cpp-filesystem", "-n", self.env_name, "--json", no_dry_run=True)
if pyexe_trash_exists:
assert trash_file.exists()
@ -165,24 +164,24 @@ class TestRemove:
with open(trash_file, "r") as fi:
lines = [x.strip() for x in fi.readlines()]
assert all([l.endswith(".mamba_trash") for l in lines])
assert all([line.endswith(".mamba_trash") for line in lines])
assert len(all_trash_files) == len(lines)
linesp = [Path(self.prefix) / l for l in lines]
linesp = [Path(self.prefix) / line for line in lines]
for atf in all_trash_files:
assert atf in linesp
else:
assert trash_file.exists() == False
assert pyexe_trash.exists() == False
assert trash_file.exists() is False
assert pyexe_trash.exists() is False
subprocess.Popen("TASKKILL /F /PID {pid} /T".format(pid=pyproc.pid))
# check that another env mod clears lingering trash files
time.sleep(0.5)
install("xsimd", "-n", self.env_name, "--json", no_dry_run=True)
assert trash_file.exists() == False
assert pyexe_trash.exists() == False
helpers.install("xsimd", "-n", self.env_name, "--json", no_dry_run=True)
assert trash_file.exists() is False
assert pyexe_trash.exists() is False
else:
assert pyexe.exists() == False
assert pyexe.exists() is False
pyproc.kill()
@ -190,8 +189,8 @@ class TestRemoveConfig:
current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
current_prefix = os.environ["CONDA_PREFIX"]
env_name = random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
env_name = helpers.random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
prefix = os.path.join(root_prefix, "envs", env_name)
@staticmethod
@ -199,8 +198,8 @@ class TestRemoveConfig:
def root(existing_cache):
os.environ["MAMBA_ROOT_PREFIX"] = TestRemoveConfig.root_prefix
os.environ["CONDA_PREFIX"] = TestRemoveConfig.prefix
create("-n", "base", no_dry_run=True)
create("-n", TestRemoveConfig.env_name, "--offline", no_dry_run=True)
helpers.create("-n", "base", no_dry_run=True)
helpers.create("-n", TestRemoveConfig.env_name, "--offline", no_dry_run=True)
yield
@ -226,10 +225,10 @@ class TestRemoveConfig:
assert res["target_prefix"] == target_prefix
assert res["use_target_prefix_fallback"]
checks = (
MAMBA_ALLOW_EXISTING_PREFIX
| MAMBA_NOT_ALLOW_MISSING_PREFIX
| MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
| MAMBA_EXPECT_EXISTING_PREFIX
helpers.MAMBA_ALLOW_EXISTING_PREFIX
| helpers.MAMBA_NOT_ALLOW_MISSING_PREFIX
| helpers.MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
| helpers.MAMBA_EXPECT_EXISTING_PREFIX
)
assert res["target_prefix_checks"] == checks
@ -237,7 +236,7 @@ class TestRemoveConfig:
specs = ["xframe", "xtl"]
cmd = list(specs)
res = remove(*cmd, "--print-config-only")
res = helpers.remove(*cmd, "--print-config-only")
TestRemoveConfig.common_tests(res)
assert res["env_name"] == ""
@ -246,9 +245,9 @@ class TestRemoveConfig:
def test_remove_then_clean(self, env_created):
env_file = __this_dir__ / "env-requires-pip-install.yaml"
env_name = "env_to_clean"
create("-n", env_name, "-f", env_file, no_dry_run=True)
remove("-n", env_name, "pip", no_dry_run=True)
clean("-ay", no_dry_run=True)
helpers.create("-n", env_name, "-f", env_file, no_dry_run=True)
helpers.remove("-n", env_name, "pip", no_dry_run=True)
helpers.clean("-ay", no_dry_run=True)
@pytest.mark.parametrize("root_prefix", (None, "env_var", "cli"))
@pytest.mark.parametrize("target_is_root", (False, True))
@ -269,9 +268,7 @@ class TestRemoveConfig:
cmd = []
if root_prefix in (None, "cli"):
os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop(
"MAMBA_ROOT_PREFIX"
)
os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop("MAMBA_ROOT_PREFIX")
if root_prefix == "cli":
cmd += ["-r", TestRemoveConfig.root_prefix]
@ -299,11 +296,9 @@ class TestRemoveConfig:
else:
os.environ["CONDA_PREFIX"] = p
if (cli_prefix and cli_env_name) or not (
cli_prefix or cli_env_name or env_var or fallback
):
if (cli_prefix and cli_env_name) or not (cli_prefix or cli_env_name or env_var or fallback):
with pytest.raises(subprocess.CalledProcessError):
remove(*cmd, "--print-config-only")
helpers.remove(*cmd, "--print-config-only")
else:
res = remove(*cmd, "--print-config-only")
res = helpers.remove(*cmd, "--print-config-only")
TestRemoveConfig.common_tests(res, root_prefix=r, target_prefix=p)

View File

@ -63,9 +63,7 @@ def test_depends_not_installed_with_channel(yaml_env: Path, with_platform):
)
assert res["result"]["pkgs"][0]["subdir"] == "win-64"
else:
res = helpers.umamba_repoquery(
"depends", "-c", "conda-forge", "xtensor=0.24.5", "--json"
)
res = helpers.umamba_repoquery("depends", "-c", "conda-forge", "xtensor=0.24.5", "--json")
assert res["query"]["query"] == "xtensor=0.24.5"
assert res["query"]["type"] == "depends"
@ -85,9 +83,7 @@ def test_depends_not_installed_with_channel(yaml_env: Path, with_platform):
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
def test_depends_recursive(yaml_env: Path):
res = helpers.umamba_repoquery(
"depends", "-c", "conda-forge", "xtensor=0.24.5", "--recursive"
)
res = helpers.umamba_repoquery("depends", "-c", "conda-forge", "xtensor=0.24.5", "--recursive")
if platform.system() == "Linux":
assert "libzlib" in res
@ -99,9 +95,7 @@ def test_depends_recursive(yaml_env: Path):
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
def test_depends_tree(yaml_env: Path):
res = helpers.umamba_repoquery(
"depends", "-c", "conda-forge", "xtensor=0.24.5", "--tree"
)
res = helpers.umamba_repoquery("depends", "-c", "conda-forge", "xtensor=0.24.5", "--tree")
if platform.system() == "Linux":
assert "libzlib" in res
@ -151,9 +145,7 @@ def test_whoneeds_not_installed_with_channel(yaml_env: Path, with_platform):
)
assert res["result"]["pkgs"][0]["subdir"] == "osx-64"
else:
res = helpers.umamba_repoquery(
"whoneeds", "-c", "conda-forge", "xtensor=0.24.5", "--json"
)
res = helpers.umamba_repoquery("whoneeds", "-c", "conda-forge", "xtensor=0.24.5", "--json")
assert res["query"]["query"] == "xtensor=0.24.5"
assert res["query"]["type"] == "whoneeds"
@ -167,9 +159,7 @@ def test_whoneeds_not_installed_with_channel(yaml_env: Path, with_platform):
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
def test_whoneeds_tree(yaml_env: Path):
res = helpers.umamba_repoquery(
"whoneeds", "-c", "conda-forge", "xtensor=0.24.5", "--tree"
)
res = helpers.umamba_repoquery("whoneeds", "-c", "conda-forge", "xtensor=0.24.5", "--tree")
assert "cppcolormap" in res
assert "pyxtensor" in res
@ -191,9 +181,7 @@ def test_search(yaml_env: Path, with_platform):
)
assert res["result"]["pkgs"][0]["subdir"] == "linux-64"
else:
res = helpers.umamba_repoquery(
"search", "-c", "conda-forge", "xtensor*", "--json"
)
res = helpers.umamba_repoquery("search", "-c", "conda-forge", "xtensor*", "--json")
assert res["query"]["query"] == "xtensor*"
assert res["query"]["type"] == "search"
@ -229,9 +217,7 @@ def test_local_search_installed_pkg(yaml_env: Path):
@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
def test_remote_search_not_installed_pkg(yaml_env: Path):
res = helpers.umamba_repoquery(
"search", "-c", "conda-forge", "xtensor=0.24.5", "--json"
)
res = helpers.umamba_repoquery("search", "-c", "conda-forge", "xtensor=0.24.5", "--json")
assert res["query"]["query"] == "xtensor=0.24.5"
assert res["query"]["type"] == "search"

View File

@ -16,9 +16,7 @@ possible_characters_for_process_names = (
def generate_label_flags():
random_string = "".join(
random.choice(possible_characters_for_process_names) for _ in range(16)
)
random_string = "".join(random.choice(possible_characters_for_process_names) for _ in range(16))
return ["--label", random_string]
@ -36,14 +34,8 @@ class TestRun:
@pytest.mark.parametrize("option_flag", common_simple_flags)
@pytest.mark.parametrize("make_label_flags", next_label_flags)
def test_fail_without_command(self, option_flag, make_label_flags):
fails = True
try:
with pytest.raises(subprocess.CalledProcessError):
umamba_run(option_flag, *make_label_flags())
fails = False
except:
fails = True
assert fails == True
@pytest.mark.parametrize("option_flag", common_simple_flags)
@pytest.mark.parametrize("make_label_flags", next_label_flags)
@ -52,15 +44,14 @@ class TestRun:
try:
umamba_run(option_flag, *make_label_flags(), "exe-that-does-not-exists")
fails = False
except:
except subprocess.CalledProcessError:
fails = True
# In detach mode we fork micromamba and don't have a way to know if the executable exists.
if option_flag == "-d" or option_flag == "--detach":
assert fails == False
return
assert fails == True
assert fails is False
else:
assert fails is True
@pytest.mark.parametrize("option_flag", common_simple_flags)
# @pytest.mark.parametrize("label_flags", naming_flags()) # TODO: reactivate after fixing help flag not disactivating the run
@ -86,26 +77,19 @@ class TestRun:
@pytest.mark.skipif(platform == "win32", reason="requires bash to be available")
def test_shell_io_routing(self):
test_script_file_name = "test_run.sh"
test_script_path = os.path.join(
os.path.dirname(__file__), test_script_file_name
)
test_script_path = os.path.join(os.path.dirname(__file__), test_script_file_name)
if not os.path.isfile(test_script_path):
raise RuntimeError(
"missing test script '{}' at '{}".format(
test_script_file_name, test_script_path
)
"missing test script '{}' at '{}".format(test_script_file_name, test_script_path)
)
subprocess_run(test_script_path, shell=True)
def test_run_non_existing_env(self):
env_name = random_string()
try:
run_res = umamba_run("-n", env_name, "python")
umamba_run("-n", env_name, "python")
except subprocess.CalledProcessError as e:
assert (
"critical libmamba The given prefix does not exist:"
in e.stderr.decode()
)
assert "critical libmamba The given prefix does not exist:" in e.stderr.decode()
@pytest.fixture()
@ -129,7 +113,5 @@ def temp_env_prefix():
class TestRunVenv:
def test_classic_specs(self, temp_env_prefix):
res = umamba_run(
"-p", temp_env_prefix, "python", "-c", "import sys; print(sys.prefix)"
)
res = umamba_run("-p", temp_env_prefix, "python", "-c", "import sys; print(sys.prefix)")
assert res.strip() == temp_env_prefix

View File

@ -16,10 +16,7 @@ def skip_if_shell_incompat(shell_type):
if (
(plat_system == "Linux" and shell_type not in ("bash", "posix", "dash"))
or (plat_system == "Windows" and shell_type not in ("cmd.exe", "powershell"))
or (
plat_system == "Darwin"
and shell_type not in ("zsh", "bash", "posix", "dash")
)
or (plat_system == "Darwin" and shell_type not in ("zsh", "bash", "posix", "dash"))
):
pytest.skip("Incompatible shell/OS")
@ -103,15 +100,11 @@ def test_auto_detection(tmp_home, tmp_root_prefix):
print(res.stderr)
except Exception:
pass
return decode_json_output(
subprocess.check_output(cmd, text=True, encoding="utf-8")
)
return decode_json_output(subprocess.check_output(cmd, text=True, encoding="utf-8"))
if platform.system() == "Windows":
if "MAMBA_TEST_SHELL_TYPE" not in os.environ:
pytest.skip(
"'MAMBA_TEST_SHELL_TYPE' env variable needs to be defined to run this test"
)
pytest.skip("'MAMBA_TEST_SHELL_TYPE' env variable needs to be defined to run this test")
shell_type = os.environ["MAMBA_TEST_SHELL_TYPE"]
if shell_type == "bash":
pytest.skip(
@ -201,12 +194,8 @@ def test_activate_target_prefix_checks(tmp_home, tmp_root_prefix):
@pytest.mark.parametrize("shell_type", ["bash", "powershell", "cmd.exe"])
@pytest.mark.parametrize("prefix_selector", [None, "prefix"])
@pytest.mark.parametrize(
"multiple_time,same_prefix", ((False, None), (True, False), (True, True))
)
def test_init(
tmp_home, tmp_root_prefix, shell_type, prefix_selector, multiple_time, same_prefix
):
@pytest.mark.parametrize("multiple_time,same_prefix", ((False, None), (True, False), (True, True)))
def test_init(tmp_home, tmp_root_prefix, shell_type, prefix_selector, multiple_time, same_prefix):
skip_if_shell_incompat(shell_type)
if prefix_selector is None:
@ -223,9 +212,7 @@ def test_init(
"Windows long-path support already enabled.",
]
else:
assert helpers.shell(
"-y", "init", "-s", shell_type, "-r", tmp_root_prefix / "env"
)
assert helpers.shell("-y", "init", "-s", shell_type, "-r", tmp_root_prefix / "env")
if shell_type == "bash":
assert (tmp_root_prefix / "etc" / "profile.d").is_dir()

View File

@ -5,16 +5,20 @@ from pathlib import Path
import pytest
from .helpers import *
# Need to import everything to get fixtures
from .helpers import * # noqa: F403
from . import helpers
@pytest.mark.skipif(dry_run_tests == DryRun.ULTRA_DRY, reason="Running ultra dry tests")
@pytest.mark.skipif(
helpers.dry_run_tests == helpers.DryRun.ULTRA_DRY, reason="Running ultra dry tests"
)
class TestUpdate:
current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
current_prefix = os.environ["CONDA_PREFIX"]
env_name = random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
env_name = helpers.random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
prefix = os.path.join(root_prefix, "envs", env_name)
old_version = "0.21.10"
medium_old_version = "0.22"
@ -34,15 +38,15 @@ class TestUpdate:
@staticmethod
@pytest.fixture
def env_created(root):
if dry_run_tests == DryRun.OFF:
create(
if helpers.dry_run_tests == helpers.DryRun.OFF:
helpers.create(
f"xtensor={TestUpdate.old_version}",
"-n",
TestUpdate.env_name,
"--json",
no_dry_run=True,
)
res = umamba_list("xtensor", "-n", TestUpdate.env_name, "--json")
res = helpers.umamba_list("xtensor", "-n", TestUpdate.env_name, "--json")
assert len(res) == 1
assert res[0]["version"].startswith(TestUpdate.old_version)
@ -51,19 +55,19 @@ class TestUpdate:
shutil.rmtree(TestUpdate.prefix)
def test_constrained_update(self, env_created):
update_res = update(
update_res = helpers.update(
"xtensor<=" + self.medium_old_version, "-n", env_created, "--json"
)
xtensor_link = [
l for l in update_res["actions"]["LINK"] if l["name"] == "xtensor"
to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "xtensor"
][0]
assert xtensor_link["version"].startswith(self.medium_old_version)
# test that we relink noarch packages
def test_update_python_noarch(self, root):
if dry_run_tests == DryRun.OFF:
res_create = create(
if helpers.dry_run_tests == helpers.DryRun.OFF:
helpers.create(
"python=3.9",
"six",
"requests",
@ -75,12 +79,12 @@ class TestUpdate:
else:
return
res = umamba_list("python", "-n", TestUpdate.env_name, "--json")
res = helpers.umamba_list("python", "-n", TestUpdate.env_name, "--json")
assert len(res) >= 1
pyelem = [r for r in res if r["name"] == "python"][0]
assert pyelem["version"].startswith("3.9")
res = umamba_list("requests", "-n", TestUpdate.env_name, "--json")
res = helpers.umamba_list("requests", "-n", TestUpdate.env_name, "--json")
prev_requests = [r for r in res if r["name"] == "requests"][0]
assert prev_requests["version"]
@ -92,20 +96,22 @@ class TestUpdate:
assert os.path.exists(site_packages_path("requests/__pycache__", "3.9"))
prev_six = umamba_list("six", "-n", TestUpdate.env_name, "--json")[0]
prev_six = helpers.umamba_list("six", "-n", TestUpdate.env_name, "--json")[0]
update_res = update("-n", TestUpdate.env_name, "python=3.10", "--json")
update_res = helpers.update("-n", TestUpdate.env_name, "python=3.10", "--json")
six_link = [l for l in update_res["actions"]["LINK"] if l["name"] == "six"][0]
six_link = [
to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "six"
][0]
assert six_link["version"] == prev_six["version"]
assert six_link["build_string"] == prev_six["build_string"]
requests_link = [
l for l in update_res["actions"]["LINK"] if l["name"] == "requests"
to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "requests"
][0]
requests_unlink = [
l for l in update_res["actions"]["UNLINK"] if l["name"] == "requests"
to_link for to_link in update_res["actions"]["UNLINK"] if to_link["name"] == "requests"
][0]
assert requests_link["version"] == requests_unlink["version"]
@ -117,48 +123,48 @@ class TestUpdate:
assert requests_link["build_string"] == prev_requests["build_string"]
def test_further_constrained_update(self, env_created):
update_res = update("xtensor==0.21.1=*_0", "--json")
update_res = helpers.update("xtensor==0.21.1=*_0", "--json")
xtensor_link = [
l for l in update_res["actions"]["LINK"] if l["name"] == "xtensor"
to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "xtensor"
][0]
assert xtensor_link["version"] == "0.21.1"
assert xtensor_link["build_number"] == 0
def test_classic_spec(self, env_created):
update_res = update("xtensor", "--json", "-n", TestUpdate.env_name)
update_res = helpers.update("xtensor", "--json", "-n", TestUpdate.env_name)
xtensor_link = [
l for l in update_res["actions"]["LINK"] if l["name"] == "xtensor"
to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "xtensor"
][0]
assert TestUpdate.old_version != xtensor_link["version"]
if dry_run_tests == DryRun.OFF:
pkg = get_concrete_pkg(update_res, "xtensor")
pkg_info = get_concrete_pkg_info(get_env(TestUpdate.env_name), pkg)
if helpers.dry_run_tests == helpers.DryRun.OFF:
pkg = helpers.get_concrete_pkg(update_res, "xtensor")
pkg_info = helpers.get_concrete_pkg_info(helpers.get_env(TestUpdate.env_name), pkg)
version = pkg_info["version"]
assert TestUpdate.old_version != version
# This should do nothing since python is not installed!
update_res = update("python", "-n", TestUpdate.env_name, "--json")
update_res = helpers.update("python", "-n", TestUpdate.env_name, "--json")
# TODO fix this?!
assert update_res["message"] == "All requested packages already installed"
assert update_res["success"] == True
assert update_res["success"] is True
assert "action" not in update_res
def test_update_all(self, env_created):
update_res = update("--all", "--json")
update_res = helpers.update("--all", "--json")
xtensor_link = [
l for l in update_res["actions"]["LINK"] if l["name"] == "xtensor"
to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "xtensor"
][0]
assert TestUpdate.old_version != xtensor_link["version"]
if dry_run_tests == DryRun.OFF:
pkg = get_concrete_pkg(update_res, "xtensor")
pkg_info = get_concrete_pkg_info(get_env(TestUpdate.env_name), pkg)
if helpers.dry_run_tests == helpers.DryRun.OFF:
pkg = helpers.get_concrete_pkg(update_res, "xtensor")
pkg_info = helpers.get_concrete_pkg_info(helpers.get_env(TestUpdate.env_name), pkg)
version = pkg_info["version"]
assert TestUpdate.old_version != version
@ -183,7 +189,7 @@ class TestUpdate:
)
def test_channel_alias(self, alias, env_created):
if alias:
res = update(
res = helpers.update(
"-n",
TestUpdate.env_name,
"xtensor",
@ -194,20 +200,20 @@ class TestUpdate:
)
ca = alias.rstrip("/")
else:
res = update("-n", TestUpdate.env_name, "xtensor", "--json", "--dry-run")
res = helpers.update("-n", TestUpdate.env_name, "xtensor", "--json", "--dry-run")
ca = "https://conda.anaconda.org"
for l in res["actions"]["LINK"]:
assert l["channel"].startswith(f"{ca}/conda-forge/")
assert l["url"].startswith(f"{ca}/conda-forge/")
for to_link in res["actions"]["LINK"]:
assert to_link["channel"].startswith(f"{ca}/conda-forge/")
assert to_link["url"].startswith(f"{ca}/conda-forge/")
class TestUpdateConfig:
current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
current_prefix = os.environ["CONDA_PREFIX"]
env_name = random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
env_name = helpers.random_string()
root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
prefix = os.path.join(root_prefix, "envs", env_name)
@staticmethod
@ -215,8 +221,8 @@ class TestUpdateConfig:
def root(existing_cache):
os.environ["MAMBA_ROOT_PREFIX"] = TestUpdateConfig.root_prefix
os.environ["CONDA_PREFIX"] = TestUpdateConfig.prefix
create("-n", "base", no_dry_run=True)
create("-n", TestUpdateConfig.env_name, "--offline", no_dry_run=True)
helpers.create("-n", "base", no_dry_run=True)
helpers.create("-n", TestUpdateConfig.env_name, "--offline", no_dry_run=True)
yield
@ -242,10 +248,10 @@ class TestUpdateConfig:
assert res["target_prefix"] == target_prefix
assert res["use_target_prefix_fallback"]
checks = (
MAMBA_ALLOW_EXISTING_PREFIX
| MAMBA_NOT_ALLOW_MISSING_PREFIX
| MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
| MAMBA_EXPECT_EXISTING_PREFIX
helpers.MAMBA_ALLOW_EXISTING_PREFIX
| helpers.MAMBA_NOT_ALLOW_MISSING_PREFIX
| helpers.MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
| helpers.MAMBA_EXPECT_EXISTING_PREFIX
)
assert res["target_prefix_checks"] == checks
@ -270,7 +276,7 @@ class TestUpdateConfig:
cmd = list(specs)
if source in ("spec_file_only", "both"):
f_name = random_string()
f_name = helpers.random_string()
spec_file = os.path.join(TestUpdateConfig.root_prefix, f_name)
if file_type == "classic":
@ -293,7 +299,7 @@ class TestUpdateConfig:
cmd += ["-f", spec_file]
res = install(*cmd, "--print-config-only")
res = helpers.install(*cmd, "--print-config-only")
TestUpdateConfig.config_tests(res)
assert res["env_name"] == ""
@ -320,9 +326,7 @@ class TestUpdateConfig:
cmd = []
if root_prefix in (None, "cli"):
os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop(
"MAMBA_ROOT_PREFIX"
)
os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop("MAMBA_ROOT_PREFIX")
if root_prefix == "cli":
cmd += ["-r", TestUpdateConfig.root_prefix]
@ -345,7 +349,7 @@ class TestUpdateConfig:
cmd += ["-n", n]
if yaml_name:
f_name = random_string() + ".yaml"
f_name = helpers.random_string() + ".yaml"
spec_file = os.path.join(TestUpdateConfig.prefix, f_name)
if yaml_name == "prefix":
@ -353,9 +357,7 @@ class TestUpdateConfig:
else:
yaml_n = n
if not (cli_prefix or cli_env_name or target_is_root):
expected_p = os.path.join(
TestUpdateConfig.root_prefix, "envs", yaml_n
)
expected_p = os.path.join(TestUpdateConfig.root_prefix, "envs", yaml_n)
file_content = [
f"name: {yaml_n}",
@ -379,10 +381,10 @@ class TestUpdateConfig:
or (yaml_name == "prefix")
or not (cli_prefix or cli_env_name or yaml_name or env_var or fallback)
):
with pytest.raises(subprocess.CalledProcessError):
install(*cmd, "--print-config-only")
with pytest.raises(helpers.subprocess.CalledProcessError):
helpers.install(*cmd, "--print-config-only")
else:
res = install(*cmd, "--print-config-only")
res = helpers.install(*cmd, "--print-config-only")
TestUpdateConfig.config_tests(res, root_prefix=r, target_prefix=expected_p)
@pytest.mark.parametrize("cli", (False, True))
@ -398,7 +400,7 @@ class TestUpdateConfig:
expected_channels += ["cli"]
if yaml:
f_name = random_string() + ".yaml"
f_name = helpers.random_string() + ".yaml"
spec_file = os.path.join(TestUpdateConfig.prefix, f_name)
file_content = [
@ -416,7 +418,7 @@ class TestUpdateConfig:
expected_channels += ["env_var"]
if rc_file:
f_name = random_string() + ".yaml"
f_name = helpers.random_string() + ".yaml"
rc_file = os.path.join(TestUpdateConfig.prefix, f_name)
file_content = ["channels: [rc]"]
@ -426,9 +428,7 @@ class TestUpdateConfig:
cmd += ["--rc-file", rc_file]
expected_channels += ["rc"]
res = install(
*cmd, "--print-config-only", no_rc=not rc_file, default_channel=False
)
res = helpers.install(*cmd, "--print-config-only", no_rc=not rc_file, default_channel=False)
TestUpdateConfig.config_tests(res)
if expected_channels:
assert res["channels"] == expected_channels
@ -445,7 +445,7 @@ class TestUpdateConfig:
]
for i in range(2):
f_name = random_string()
f_name = helpers.random_string()
file = os.path.join(TestUpdateConfig.prefix, f_name)
if type == "yaml":
@ -453,7 +453,6 @@ class TestUpdateConfig:
file_content = [f"dependencies: [{specs[i]}]"]
elif type == "classic":
file_content = [specs[i]]
expected_specs = specs
else: # explicit
file_content = ["@EXPLICIT", explicit_specs[i]]
@ -463,16 +462,16 @@ class TestUpdateConfig:
cmd += ["-f", file]
if type == "yaml":
with pytest.raises(subprocess.CalledProcessError):
install(*cmd, "--print-config-only")
with pytest.raises(helpers.subprocess.CalledProcessError):
helpers.install(*cmd, "--print-config-only")
else:
res = install(*cmd, "--print-config-only")
res = helpers.install(*cmd, "--print-config-only")
if type == "classic":
assert res["specs"] == specs
else: # explicit
assert res["specs"] == [explicit_specs[0]]
def test_channel_specific(self, env_created):
install("quantstack::sphinx", no_dry_run=True)
res = update("quantstack::sphinx", "-c", "conda-forge", "--json")
helpers.install("quantstack::sphinx", no_dry_run=True)
res = helpers.update("quantstack::sphinx", "-c", "conda-forge", "--json")
assert "actions" not in res

View File

@ -1,8 +1,6 @@
import os
import platform
import pytest
from .helpers import info

View File

@ -2,3 +2,9 @@
minversion = "6.0"
tmp_path_retention_policy = "failed"
addopts = "--color=yes"
[tool.ruff]
line-length = 100
target-version = "py37"
[tool.ruff.format]
line-ending = "lf"

View File

@ -161,9 +161,7 @@ def main():
else:
sections[-1].items.append(Item())
sections[-1].items[-1].text = c[m.end() :].strip()
sections[-1].items[-1].applies_to = [
x.strip() for x in m.groups(1)[0].split(",")
]
sections[-1].items[-1].applies_to = [x.strip() for x in m.groups(1)[0].split(",")]
else:
if c.startswith(" "):