mirror of https://github.com/mamba-org/mamba.git

Move to Ruff (#3011)

* Use Ruff LSP
* Remove unused imports
* More Ruff fixes
* Remove unused test vars
* Explicit import
* Explicit import
* Explicit import
* Explicit import
* Explicit import
* Warning fixes
* Import fixtures
* Add ruff pre-commit
* Remove pre-commit checks superseded by Ruff
* Fix imports
* Fix ruff warning
* Fix test_linking fixtures
* Fix typo
* Fix test_update fixture import
* Python line-length to 100
* Reformat Python code line length
* Fix typo

parent 7f325df6ad
commit 1230b92094

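This commit swaps the black, isort, and flake8 pre-commit hooks for ruff and ruff-format, keeps blacken-docs and clang-format, and raises the Python line length to 100 columns. Most of the churn in the Python hunks below is mechanical: ruff-format collapses calls that black had wrapped at its default 88 columns. A representative before/after, taken from the test changes:

    # Before (wrapped at 88 columns by black):
    res = helpers.create(
        "-n", "myenv", f"--rc-file={rc_file}", "--json", spec_name, no_rc=False
    )

    # After (fits within the new 100-column limit, so ruff-format keeps it on one line):
    res = helpers.create("-n", "myenv", f"--rc-file={rc_file}", "--json", spec_name, no_rc=False)

The remaining edits are autofixes for individual Ruff lint rules.
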
@@ -1,15 +1,5 @@
 exclude: libmamba/tests/data/repodata_json_cache*
 repos:
-  - repo: https://github.com/psf/black
-    rev: 23.9.1
-    hooks:
-      - id: black
-        args: [--safe, --quiet]
-  - repo: https://github.com/asottile/blacken-docs
-    rev: 1.16.0
-    hooks:
-      - id: blacken-docs
-        additional_dependencies: [black==22.3.0]
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.4.0
     hooks:

@@ -26,22 +16,17 @@ repos:
         args: [--autofix]
       - id: debug-statements
        language_version: python3
-  - repo: https://github.com/pre-commit/mirrors-isort
-    rev: v5.10.1
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.1.6
     hooks:
-      - id: isort
-        exclude: tests/data
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.1.0
+      - id: ruff
+        args: [ --fix ]
+      - id: ruff-format
+  - repo: https://github.com/asottile/blacken-docs
+    rev: 1.16.0
     hooks:
-      - id: flake8
-        language_version: python3
-        additional_dependencies:
-          - flake8-typing-imports==1.15.0
-          - flake8-builtins==2.1.0
-          - flake8-bugbear==23.9.16
-          - flake8-isort==6.1.0
-        exclude: libmambapy/src
+      - id: blacken-docs
+        additional_dependencies: [black==22.3.0]
   - repo: https://github.com/pre-commit/mirrors-clang-format
     rev: v16.0.6
     hooks:

@@ -14,9 +14,9 @@ dependencies:
   - sel(osx): lldb
   - sel(linux): valgrind # Out of date on MacOS
   # Python LSP support
+  - ruff
   - python-lsp-server-base
-  - python-lsp-black
-  - black
+  - ruff-lsp
   # Interactive Python tools
   - jupyterlab
   - ipython

@@ -51,9 +51,7 @@ def figure_wrapper(directive, node, caption):
     figure_node["align"] = node.attributes.pop("align")

     parsed = nodes.Element()
-    directive.state.nested_parse(
-        ViewList([caption], source=""), directive.content_offset, parsed
-    )
+    directive.state.nested_parse(ViewList([caption], source=""), directive.content_offset, parsed)
     caption_node = nodes.caption(parsed[0].rawsource, "", *parsed[0].children)
     caption_node.source = parsed[0].source
     caption_node.line = parsed[0].line

@@ -87,8 +85,7 @@ class Mermaid(Directive):
         if self.content:
             return [
                 document.reporter.warning(
-                    "Mermaid directive cannot have both content and "
-                    "a filename argument",
+                    "Mermaid directive cannot have both content and " "a filename argument",
                     line=self.lineno,
                 )
             ]

@@ -102,8 +99,7 @@ class Mermaid(Directive):
         except (IOError, OSError):  # noqa
             return [
                 document.reporter.warning(
-                    "External Mermaid file %r not found or reading "
-                    "it failed" % filename,
+                    "External Mermaid file %r not found or reading " "it failed" % filename,
                     line=self.lineno,
                 )
             ]

@@ -144,9 +140,9 @@ def render_mm(self, code, options, fmt, prefix="mermaid"):
         fmt = "png"

     mermaid_cmd = self.builder.config.mermaid_cmd
-    hashkey = (
-        code + str(options) + str(self.builder.config.mermaid_sequence_config)
-    ).encode("utf-8")
+    hashkey = (code + str(options) + str(self.builder.config.mermaid_sequence_config)).encode(
+        "utf-8"
+    )

     basename = "%s-%s" % (prefix, sha1(hashkey).hexdigest())
     fname = "%s.%s" % (basename, fmt)

@@ -189,8 +185,7 @@ def render_mm(self, code, options, fmt, prefix="mermaid"):

     if p.returncode != 0:
         raise MermaidError(
-            "Mermaid exited with error:\n[stderr]\n%s\n"
-            "[stdout]\n%s" % (stderr, stdout)
+            "Mermaid exited with error:\n[stderr]\n%s\n" "[stdout]\n%s" % (stderr, stdout)
         )
     if not os.path.isfile(outfn):
         raise MermaidError(

@@ -200,9 +195,7 @@ def render_mm(self, code, options, fmt, prefix="mermaid"):
     return relfn, outfn


-def _render_mm_html_raw(
-    self, node, code, options, prefix="mermaid", imgcls=None, alt=None
-):
+def _render_mm_html_raw(self, node, code, options, prefix="mermaid", imgcls=None, alt=None):
     if "align" in node:
         tag_template = """<div align="{align}" class="mermaid align-{align}">
            {code}

@@ -213,9 +206,7 @@ def _render_mm_html_raw(
            {code}
            </div>"""

-    self.body.append(
-        tag_template.format(align=node.get("align"), code=self.encode(code))
-    )
+    self.body.append(tag_template.format(align=node.get("align"), code=self.encode(code)))
     raise nodes.SkipNode


@@ -229,8 +220,7 @@ def render_mm_html(self, node, code, options, prefix="mermaid", imgcls=None, alt
     try:
         if fmt not in ("png", "svg"):
             raise MermaidError(
-                "mermaid_output_format must be one of 'raw', 'png', "
-                "'svg', but is %r" % fmt
+                "mermaid_output_format must be one of 'raw', 'png', " "'svg', but is %r" % fmt
             )

         fname, outfn = render_mm(self, code, options, fmt, prefix)

@@ -295,8 +285,7 @@ def render_mm_latex(self, node, code, options, prefix="mermaid"):

     if p.returncode != 0:
         raise MermaidError(
-            "PdfCrop exited with error:\n[stderr]\n%s\n"
-            "[stdout]\n%s" % (stderr, stdout)
+            "PdfCrop exited with error:\n[stderr]\n%s\n" "[stdout]\n%s" % (stderr, stdout)
         )
     if not os.path.isfile(outfn):
         raise MermaidError(

@@ -304,9 +293,7 @@ def render_mm_latex(self, node, code, options, prefix="mermaid"):
             "[stdout]\n%s" % (stderr, stdout)
         )

-    fname = "{filename[0]}-crop{filename[1]}".format(
-        filename=os.path.splitext(fname)
-    )
+    fname = "{filename[0]}-crop{filename[1]}".format(filename=os.path.splitext(fname))

     is_inline = self.is_inline(node)
     if is_inline:

@@ -208,9 +208,7 @@ class MermaidDiagram(InheritanceDiagram):
         return [figure]


-def html_visit_mermaid_inheritance(
-    self: HTMLTranslator, node: inheritance_diagram
-) -> None:
+def html_visit_mermaid_inheritance(self: HTMLTranslator, node: inheritance_diagram) -> None:
     """
     Output the graph for HTML. This will insert a PNG with clickable
     image map.

@@ -233,9 +231,7 @@ def html_visit_mermaid_inheritance(
                 urls[child["reftitle"]] = child.get("refuri")
             elif child.get("refid") is not None:
                 if mermaid_output_format == "SVG":
-                    urls[child["reftitle"]] = (
-                        "../" + current_filename + "#" + child.get("refid")
-                    )
+                    urls[child["reftitle"]] = "../" + current_filename + "#" + child.get("refid")
                 else:
                     urls[child["reftitle"]] = "#" + child.get("refid")
     dotcode = graph.generate_dot(name, urls, env=self.builder.env)

@@ -251,9 +247,7 @@ def html_visit_mermaid_inheritance(
     raise nodes.SkipNode


-def latex_visit_mermaid_inheritance(
-    self: LaTeXTranslator, node: inheritance_diagram
-) -> None:
+def latex_visit_mermaid_inheritance(self: LaTeXTranslator, node: inheritance_diagram) -> None:
     """
     Output the graph for LaTeX. This will insert a PDF.
     """

@@ -271,9 +265,7 @@ def latex_visit_mermaid_inheritance(
     raise nodes.SkipNode


-def texinfo_visit_mermaid_inheritance(
-    self: TexinfoTranslator, node: inheritance_diagram
-) -> None:
+def texinfo_visit_mermaid_inheritance(self: TexinfoTranslator, node: inheritance_diagram) -> None:
     """
     Output the graph for Texinfo. This will insert a PNG.
     """

@@ -48,17 +48,11 @@ def main():
     parser = argparse.ArgumentParser(description="Generate binary header output")
     parser.add_argument("-i", "--input", required=True, help="Input file", type=Path)
     parser.add_argument("-o", "--out", required=True, help="Output file", type=Path)
-    parser.add_argument(
-        "-v", "--var", required=True, help="Variable name to use in file"
-    )
-    parser.add_argument(
-        "-e", "--extern", action="store_true", help="Add 'extern' declaration"
-    )
+    parser.add_argument("-v", "--var", required=True, help="Variable name to use in file")
+    parser.add_argument("-e", "--extern", action="store_true", help="Add 'extern' declaration")
     args = parser.parse_args()

-    argv_pretty = " ".join(
-        Path(arg).name if "/" in arg or "\\" in arg else arg for arg in sys.argv
-    )
+    argv_pretty = " ".join(Path(arg).name if "/" in arg or "\\" in arg else arg for arg in sys.argv)
     comment = f"/* This file was generated using {argv_pretty} */"

     out = bin2header(comment, args.input.read_bytes(), args.var, args.extern)

@@ -1,5 +1,7 @@
 import libmambapy.version
-from libmambapy.bindings.legacy import *  # Legacy which used to combine everything
+
+# Legacy which used to combine everything
+from libmambapy.bindings.legacy import *  # noqa: F403

 # Define top-level attributes
 __version__ = libmambapy.version.__version__

@@ -977,9 +977,7 @@ class PackageInfo:
     @typing.overload
     def __init__(self, name: str) -> None: ...
     @typing.overload
-    def __init__(
-        self, name: str, version: str, build_string: str, build_number: int
-    ) -> None: ...
+    def __init__(self, name: str, version: str, build_string: str, build_number: int) -> None: ...
     @property
     def build_number(self) -> int:
         """

@@ -1312,9 +1310,7 @@ class RootRole:
     pass

 class Solver:
-    def __init__(
-        self, arg0: Pool, arg1: typing.List[typing.Tuple[int, int]]
-    ) -> None: ...
+    def __init__(self, arg0: Pool, arg1: typing.List[typing.Tuple[int, int]]) -> None: ...
     def add_constraint(self, arg0: str) -> None: ...
     def add_global_job(self, arg0: int) -> None: ...
     def add_jobs(self, arg0: typing.List[str], arg1: int) -> None: ...

@@ -1326,9 +1322,7 @@ class Solver:
     def must_solve(self) -> None: ...
     def problems_to_str(self) -> str: ...
     def set_flags(self, arg0: typing.List[typing.Tuple[int, int]]) -> None: ...
-    def set_postsolve_flags(
-        self, arg0: typing.List[typing.Tuple[int, int]]
-    ) -> None: ...
+    def set_postsolve_flags(self, arg0: typing.List[typing.Tuple[int, int]]) -> None: ...
     def solve(self) -> bool: ...
     def try_solve(self) -> bool: ...
     pass

@@ -1,2 +0,0 @@
-[pycodestyle]
-ignore = E5,W1,W2,W3,W5

@@ -121,9 +121,7 @@ class RepoSigner:
             fout.write(root_md_serialized_unsigned)

         # This overwrites the file with a signed version of the file.
-        cct_root_signing.sign_root_metadata_via_gpg(
-            root_filepath, root_keys[0]["fingerprint"]
-        )
+        cct_root_signing.sign_root_metadata_via_gpg(root_filepath, root_keys[0]["fingerprint"])

         # Load untrusted signed root metadata.
         signed_root_md = cct_common.load_metadata_from_file(root_filepath)

@@ -133,9 +131,7 @@ class RepoSigner:
         print("[reposigner] Root metadata signed & verified!")

     def create_key_mgr(self, keys):
-        private_key_key_mgr = cct_common.PrivateKey.from_hex(
-            keys["key_mgr"][0]["private"]
-        )
+        private_key_key_mgr = cct_common.PrivateKey.from_hex(keys["key_mgr"][0]["private"])
         pkg_mgr_pub_keys = [k["public"] for k in keys["pkg_mgr"]]
         key_mgr = cct_metadata_construction.build_delegating_metadata(
             metadata_type="key_mgr",  # 'root' or 'key_mgr'

@@ -156,9 +152,7 @@ class RepoSigner:

         # let's run a verification
         root_metadata = cct_common.load_metadata_from_file(self.folder / "1.root.json")
-        key_mgr_metadata = cct_common.load_metadata_from_file(
-            self.folder / "key_mgr.json"
-        )
+        key_mgr_metadata = cct_common.load_metadata_from_file(self.folder / "key_mgr.json")

         cct_common.checkformat_signable(root_metadata)

@@ -168,9 +162,7 @@ class RepoSigner:
         root_delegations = root_metadata["signed"]["delegations"]  # for brevity
         cct_common.checkformat_delegations(root_delegations)
         if "key_mgr" not in root_delegations:
-            raise ValueError(
-                'Missing expected delegation to "key_mgr" in root metadata.'
-            )
+            raise ValueError('Missing expected delegation to "key_mgr" in root metadata.')
         cct_common.checkformat_delegation(root_delegations["key_mgr"])

         # Doing delegation processing.

@@ -283,14 +275,10 @@ class ChannelHandler(SimpleHTTPRequestHandler):
         self.wfile.write(b"no valid api key received")


-global_parser = argparse.ArgumentParser(
-    description="Start a multi-channel conda package server."
-)
+global_parser = argparse.ArgumentParser(description="Start a multi-channel conda package server.")
 global_parser.add_argument("-p", "--port", type=int, default=8000, help="Port to use.")

-channel_parser = argparse.ArgumentParser(
-    description="Start a simple conda package server."
-)
+channel_parser = argparse.ArgumentParser(description="Start a simple conda package server.")
 channel_parser.add_argument(
     "-d",
     "--directory",

@@ -93,9 +93,7 @@ def tmp_clean_env(tmp_environ: None) -> None:
         if k.startswith(("CONDA", "_CONDA", "MAMBA", "_MAMBA", "XDG_")):
             del os.environ[k]

-    def keep_in_path(
-        p: str, prefix: Optional[str] = tmp_environ.get("CONDA_PREFIX")
-    ) -> bool:
+    def keep_in_path(p: str, prefix: Optional[str] = tmp_environ.get("CONDA_PREFIX")) -> bool:
         if "condabin" in p:
             return False
         # On windows, PATH is also used for dyanamic libraries.

@@ -6,7 +6,6 @@ import random
 import shutil
 import string
 import subprocess
-import sys
 from enum import Enum
 from pathlib import Path

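The dropped `import sys` is Ruff's F401 (unused import): nothing in the module references `sys` any more, so `ruff check --fix` deletes the import. A minimal reproduction:

    # demo.py -- running `ruff check --fix demo.py` removes the unused import below
    import sys

    print("nothing here touches sys")
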
@@ -36,9 +35,7 @@ class DryRun(Enum):
 use_offline = False
 channel = ["-c", "conda-forge"]
 dry_run_tests = DryRun(
-    os.environ["MAMBA_DRY_RUN_TESTS"]
-    if ("MAMBA_DRY_RUN_TESTS" in os.environ)
-    else "OFF"
+    os.environ["MAMBA_DRY_RUN_TESTS"] if ("MAMBA_DRY_RUN_TESTS" in os.environ) else "OFF"
 )

 MAMBA_NO_PREFIX_CHECK = 1 << 0

@@ -153,7 +150,7 @@ def install(*args, default_channel=True, no_rc=True, no_dry_run=False, **kwargs)
     try:
         j = json.loads(res)
         return j
-    except:
+    except Exception:
         print(res.decode())
         return
     if "--print-config-only" in args:

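The `except:` to `except Exception:` change is Ruff's E722 (bare except). It matters because a bare `except:` catches `BaseException`, so it silently traps `SystemExit` and `KeyboardInterrupt` too. A minimal sketch of the corrected pattern:

    import json

    def try_parse(res: bytes):
        try:
            return json.loads(res)
        except Exception:  # bare `except:` would also swallow SystemExit/KeyboardInterrupt
            print(res.decode())
            return None
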
@@ -46,9 +46,7 @@ class WindowsProfiles:
                 "-Command",
                 "$PROFILE.CurrentUserAllHosts",
             ]
-            res = subprocess.run(
-                args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True
-            )
+            res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
             return res.stdout.decode("utf-8").strip()
         elif shell == "cmd.exe":
             return None

@@ -159,9 +157,7 @@ def call_interpreter(s, tmp_path, interpreter, interactive=False, env=None):
     if interpreter == "cmd.exe":
         mods = ["@chcp 65001>nul"]
         for x in s:
-            if x.startswith("micromamba activate") or x.startswith(
-                "micromamba deactivate"
-            ):
+            if x.startswith("micromamba activate") or x.startswith("micromamba deactivate"):
                 mods.append("call " + x)
             else:
                 mods.append(x)

@@ -306,15 +302,15 @@ def test_shell_init(
     interpreter,
 ):
     # TODO enable these tests also on win + bash!
-    if interpreter not in valid_interpreters or (
-        plat == "win" and interpreter == "bash"
-    ):
+    if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
         pytest.skip(f"{interpreter} not available")

     umamba = helpers.get_umamba()
     run_dir = tmp_path / "rundir"
     run_dir.mkdir()
-    call = lambda s: call_interpreter(s, run_dir, interpreter)
+
+    def call(s):
+        return call_interpreter(s, run_dir, interpreter)

     rpv = shvar("MAMBA_ROOT_PREFIX", interpreter)
     s = [f"echo {rpv}"]

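The `call = lambda ...` rewrites in this file are Ruff's E731 (do not assign a lambda to a name); a named `def` gives the callable a real `__name__` and a place for a docstring, with identical behavior. The pattern, extracted from the hunk above:

    # Before: E731 -- a lambda bound to a name
    call = lambda s: call_interpreter(s, run_dir, interpreter)

    # After: an equivalent named function
    def call(s):
        return call_interpreter(s, run_dir, interpreter)
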
@@ -407,9 +403,7 @@ def test_shell_init_deinit_root_prefix_files(
     tmp_path,
     interpreter,
 ):
-    if interpreter not in valid_interpreters or (
-        plat == "win" and interpreter == "bash"
-    ):
+    if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
         pytest.skip(f"{interpreter} not available")

     umamba = helpers.get_umamba()

@@ -495,9 +489,7 @@ def test_shell_init_deinit_contents(
     tmp_path,
     interpreter,
 ):
-    if interpreter not in valid_interpreters or (
-        plat == "win" and interpreter == "bash"
-    ):
+    if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
         pytest.skip(f"{interpreter} not available")

     umamba = helpers.get_umamba()

@@ -542,9 +534,7 @@ def test_shell_init_deinit_contents(

 @pytest.mark.parametrize("interpreter", get_interpreters())
 def test_env_activation(tmp_home, winreg_value, tmp_root_prefix, tmp_path, interpreter):
-    if interpreter not in valid_interpreters or (
-        plat == "win" and interpreter == "bash"
-    ):
+    if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
         pytest.skip(f"{interpreter} not available")

     umamba = helpers.get_umamba()

@@ -552,7 +542,8 @@ def test_env_activation(tmp_home, winreg_value, tmp_root_prefix, tmp_path, inter
     s = [f"{umamba} shell init -r {tmp_root_prefix}"]
     stdout, stderr = call_interpreter(s, tmp_path, interpreter)

-    call = lambda s: call_interpreter(s, tmp_path, interpreter, interactive=True)
+    def call(s):
+        return call_interpreter(s, tmp_path, interpreter, interactive=True)

     evars = extract_vars(["CONDA_PREFIX", "CONDA_SHLVL", "PATH"], interpreter)

@@ -641,9 +632,7 @@ def test_activation_envvars(
     tmp_path,
     interpreter,
 ):
-    if interpreter not in valid_interpreters or (
-        plat == "win" and interpreter == "bash"
-    ):
+    if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
         pytest.skip(f"{interpreter} not available")

     umamba = helpers.get_umamba()

@@ -651,7 +640,8 @@ def test_activation_envvars(
     s = [f"{umamba} shell init -r {tmp_root_prefix}"]
     stdout, stderr = call_interpreter(s, tmp_path, interpreter)

-    call = lambda s: call_interpreter(s, tmp_path, interpreter, interactive=True)
+    def call(s):
+        return call_interpreter(s, tmp_path, interpreter, interactive=True)

     evars = extract_vars(["CONDA_PREFIX", "CONDA_SHLVL", "PATH"], interpreter)

@@ -760,9 +750,7 @@ def test_unicode_activation(
     tmp_path,
     interpreter,
 ):
-    if interpreter not in valid_interpreters or (
-        plat == "win" and interpreter == "bash"
-    ):
+    if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
         pytest.skip(f"{interpreter} not available")

     umamba = helpers.get_umamba()

@@ -770,7 +758,8 @@ def test_unicode_activation(
     s = [f"{umamba} shell init -r {tmp_root_prefix}"]
     stdout, stderr = call_interpreter(s, tmp_path, interpreter)

-    call = lambda s: call_interpreter(s, tmp_path, interpreter, interactive=True)
+    def call(s):
+        return call_interpreter(s, tmp_path, interpreter, interactive=True)

     evars = extract_vars(["CONDA_PREFIX", "CONDA_SHLVL", "PATH"], interpreter)

@@ -865,9 +854,7 @@ def test_unicode_activation(

 @pytest.mark.parametrize("interpreter", get_interpreters())
 def test_activate_path(tmp_empty_env, tmp_env_name, interpreter, tmp_path):
-    if interpreter not in valid_interpreters or (
-        plat == "win" and interpreter == "bash"
-    ):
+    if interpreter not in valid_interpreters or (plat == "win" and interpreter == "bash"):
         pytest.skip(f"{interpreter} not available")

     # Activate env name

@@ -116,9 +116,7 @@ class TestConfigSources:
     @pytest.mark.parametrize(
         "rc_file", (("home", "dummy.yaml"), ("home", ".mambarc")), indirect=True
     )
-    @pytest.mark.parametrize(
-        "rc_file_args", ({"override_channels_enabled": True},), indirect=True
-    )
+    @pytest.mark.parametrize("rc_file_args", ({"override_channels_enabled": True},), indirect=True)
     @pytest.mark.parametrize("quiet_flag", ["-q", "--quiet"])
     @pytest.mark.parametrize("norc", [False, True])
     def test_config_sources(self, rc_file, quiet_flag, norc):

@@ -172,15 +170,11 @@ class TestConfigSources:
         ),
         indirect=True,
     )
-    @pytest.mark.parametrize(
-        "rc_file_args", ({"override_channels_enabled": True},), indirect=True
-    )
+    @pytest.mark.parametrize("rc_file_args", ({"override_channels_enabled": True},), indirect=True)
     def test_config_rc_file(self, rc_file, tmp_env_name):
         srcs = config("sources", "-n", tmp_env_name).strip().splitlines()
         short_name = str(rc_file).replace(os.path.expanduser("~"), "~")
-        expected_srcs = (
-            f"Configuration files (by precedence order):\n{short_name}".splitlines()
-        )
+        expected_srcs = f"Configuration files (by precedence order):\n{short_name}".splitlines()
         assert srcs == expected_srcs

     @pytest.mark.parametrize(

@@ -188,9 +182,7 @@ class TestConfigSources:
         [("home", "somefile.yml")],
         indirect=True,
     )
-    @pytest.mark.parametrize(
-        "rc_file_args", ({"override_channels_enabled": True},), indirect=True
-    )
+    @pytest.mark.parametrize("rc_file_args", ({"override_channels_enabled": True},), indirect=True)
     def test_config_expand_user(self, rc_file):
         rc_file_short = str(rc_file).replace(os.path.expanduser("~"), "~")
         res = config("sources", "--rc-file", rc_file)

@@ -258,9 +250,7 @@ class TestConfigList:
         )

         assert (
-            config(
-                "list", "--no-env", "--rc-file", rc_file, "-d", group_flag
-            ).splitlines()
+            config("list", "--no-env", "--rc-file", rc_file, "-d", group_flag).splitlines()
             == f"{group}# channels\n# Define the list of channels\nchannels:\n"
             " - channel1\n - channel2\n".splitlines()
         )

@@ -283,18 +273,14 @@ class TestConfigList:
         os.environ["MAMBA_OFFLINE"] = "false"

         assert (
-            config(
-                "list", "offline", "--no-rc", "--no-env", "-s", "--offline"
-            ).splitlines()
+            config("list", "offline", "--no-rc", "--no-env", "-s", "--offline").splitlines()
             == "offline: true # 'CLI'".splitlines()
         )

         os.environ.pop("MAMBA_OFFLINE")

     def test_precedence(self):
-        rc_dir = os.path.expanduser(
-            os.path.join("~", "test_mamba", helpers.random_string())
-        )
+        rc_dir = os.path.expanduser(os.path.join("~", "test_mamba", helpers.random_string()))
         os.makedirs(rc_dir, exist_ok=True)
         rc_file = os.path.join(rc_dir, ".mambarc")
         short_rc_file = rc_file.replace(os.path.expanduser("~"), "~")

@@ -322,9 +308,7 @@ class TestConfigList:
         )

         assert (
-            config(
-                "list", "offline", f"--rc-file={rc_file}", "-s", "--offline"
-            ).splitlines()
+            config("list", "offline", f"--rc-file={rc_file}", "-s", "--offline").splitlines()
             == f"offline: true # 'CLI' > 'MAMBA_OFFLINE' > '{short_rc_file}'".splitlines()
         )
         assert (

@@ -359,18 +343,12 @@ class TestConfigList:
 class TestConfigModifiers:
     def test_file_set_single_input(self, rc_file):
         config("set", "json", "true", "--file", rc_file)
-        assert (
-            config("get", "json", "--file", rc_file).splitlines()
-            == "json: true".splitlines()
-        )
+        assert config("get", "json", "--file", rc_file).splitlines() == "json: true".splitlines()

     def test_file_set_change_key_value(self, rc_file):
         config("set", "json", "true", "--file", rc_file)
         config("set", "json", "false", "--file", rc_file)
-        assert (
-            config("get", "json", "--file", rc_file).splitlines()
-            == "json: false".splitlines()
-        )
+        assert config("get", "json", "--file", rc_file).splitlines() == "json: false".splitlines()

     def test_file_set_invalit_input(self, rc_file):
         assert (

@@ -721,9 +699,7 @@ class TestConfigExpandVars:
         value = _expandvars(attr, "['${TEST_VAR}']", "foo")
         assert value == ["foo"]

-        custom_channels = _expandvars(
-            "custom_channels", "{'x': '${TEST_VAR}'}", "http://foo"
-        )
+        custom_channels = _expandvars("custom_channels", "{'x': '${TEST_VAR}'}", "http://foo")
         assert custom_channels["x"] == "http://foo"

         custom_multichannels = _expandvars(

@@ -793,9 +769,7 @@ class TestConfigExpandVars:
         monkeypatch.setenv("foo", "bar", True)
         monkeypatch.setenv("{foo", "baz1", True)
         monkeypatch.setenv("{foo}", "baz2", True)
-        assert outp == self._roundtrip_attr(
-            rc_file, "channel_alias", yaml_quote + inp + yaml_quote
-        )
+        assert outp == self._roundtrip_attr(rc_file, "channel_alias", yaml_quote + inp + yaml_quote)

     @pytest.mark.parametrize(
         "inp,outp",

@@ -840,9 +814,7 @@ class TestConfigExpandVars:
         )
         monkeypatch.setenv("CONDA_API_KEY", "kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk", True)
         monkeypatch.setenv("CONDA_CHANNEL_UPLOAD_USER", "uuuuuuuuu", True)
-        monkeypatch.setenv(
-            "CONDA_CHANNEL_UPLOAD_PASSWORD", "pppppppppppppppppppp", True
-        )
+        monkeypatch.setenv("CONDA_CHANNEL_UPLOAD_PASSWORD", "pppppppppppppppppppp", True)
         out = self._roundtrip(rc_file, condarc)
         assert (
             out["channel_alias"]

@@ -4,11 +4,11 @@ import os
 import shutil
 import subprocess

-from .helpers import *
+from . import helpers


 def constructor(*args, default_channel=True, no_rc=True, no_dry_run=False):
-    umamba = get_umamba()
+    umamba = helpers.get_umamba()
     cmd = [umamba, "constructor"] + [arg for arg in args if arg]

     try:

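Swapping `from .helpers import *` for `from . import helpers` addresses Ruff's F403/F405: with a star import, a name like `get_umamba` has no visible origin, so neither the linter nor a reader can tell whether it is defined locally or imported. Every call site is then qualified explicitly, as in the pattern above:

    from . import helpers

    umamba = helpers.get_umamba()  # the origin of the name is now explicit
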
@@ -33,8 +33,8 @@ class TestInstall:
     current_prefix = os.environ["CONDA_PREFIX"]
     cache = os.path.join(current_root_prefix, "pkgs")

-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
+    env_name = helpers.random_string()
+    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
     prefix = os.path.join(root_prefix, "envs", env_name)
     new_cache = os.path.join(root_prefix, "pkgs")

@@ -46,17 +46,13 @@ class TestInstall:
         # speed-up the tests
         os.environ["CONDA_PKGS_DIRS"] = TestInstall.new_cache
         os.makedirs(TestInstall.new_cache, exist_ok=True)
-        root_pkgs = glob.glob(
-            os.path.join(TestInstall.current_root_prefix, "pkgs", "x*.tar.bz2")
-        )
+        root_pkgs = glob.glob(os.path.join(TestInstall.current_root_prefix, "pkgs", "x*.tar.bz2"))
         urls = []

         for pkg in root_pkgs:
             shutil.copy(pkg, TestInstall.new_cache)
             urls.append(
-                "http://testurl.com/conda-forge/linux-64/"
-                + os.path.basename(pkg)
-                + "#123412341234"
+                "http://testurl.com/conda-forge/linux-64/" + os.path.basename(pkg) + "#123412341234"
             )

         cls.pkgs = [os.path.basename(pkg) for pkg in root_pkgs]

@@ -78,19 +74,12 @@ class TestInstall:
         constructor("--prefix", TestInstall.root_prefix, "--extract-conda-pkgs")

         for pkg in self.pkgs:
-            extracted_pkg = os.path.join(
-                TestInstall.root_prefix, "pkgs", pkg.rsplit(".tar.bz2")[0]
-            )
-            with open(
-                os.path.join(extracted_pkg, "info", "repodata_record.json")
-            ) as rr:
+            extracted_pkg = os.path.join(TestInstall.root_prefix, "pkgs", pkg.rsplit(".tar.bz2")[0])
+            with open(os.path.join(extracted_pkg, "info", "repodata_record.json")) as rr:
                 repodata_record = json.load(rr)
             with open(os.path.join(extracted_pkg, "info", "index.json")) as ri:
                 index = json.load(ri)
             assert repodata_record["fn"] == pkg
             assert repodata_record["md5"] == "123412341234"
-            assert (
-                repodata_record["url"]
-                == "http://testurl.com/conda-forge/linux-64/" + pkg
-            )
+            assert repodata_record["url"] == "http://testurl.com/conda-forge/linux-64/" + pkg
             assert repodata_record["depends"] == index["depends"]

@@ -110,41 +110,31 @@ def test_lockfile(tmp_home, tmp_root_prefix, tmp_path):
     assert res["success"]

     packages = helpers.umamba_list("-p", env_prefix, "--json")
-    assert any(
-        package["name"] == "zlib" and package["version"] == "1.2.11"
-        for package in packages
-    )
+    assert any(package["name"] == "zlib" and package["version"] == "1.2.11" for package in packages)


 @pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
 def test_lockfile_online(tmp_home, tmp_root_prefix, tmp_path):
     env_prefix = tmp_path / "myenv"
-    spec_file = "https://raw.githubusercontent.com/mamba-org/mamba/main/micromamba/tests/test_env-lock.yaml"
+    spec_file = (
+        "https://raw.githubusercontent.com/mamba-org/mamba/main/micromamba/tests/test_env-lock.yaml"
+    )

     res = helpers.create("-p", env_prefix, "-f", spec_file, "--json")
     assert res["success"]

     packages = helpers.umamba_list("-p", env_prefix, "--json")
-    assert any(
-        package["name"] == "zlib" and package["version"] == "1.2.11"
-        for package in packages
-    )
+    assert any(package["name"] == "zlib" and package["version"] == "1.2.11" for package in packages)


 @pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
-def test_env_lockfile_different_install_after_create(
-    tmp_home, tmp_root_prefix, tmp_path
-):
+def test_env_lockfile_different_install_after_create(tmp_home, tmp_root_prefix, tmp_path):
     env_prefix = tmp_path / "myenv"
     create_spec_file = tmp_path / "env-create-lock.yaml"
     install_spec_file = tmp_path / "env-install-lock.yaml"

-    shutil.copyfile(
-        __this_dir__ / "envlockfile-check-step-1-lock.yaml", create_spec_file
-    )
-    shutil.copyfile(
-        __this_dir__ / "envlockfile-check-step-2-lock.yaml", install_spec_file
-    )
+    shutil.copyfile(__this_dir__ / "envlockfile-check-step-1-lock.yaml", create_spec_file)
+    shutil.copyfile(__this_dir__ / "envlockfile-check-step-2-lock.yaml", install_spec_file)

     res = helpers.create("-p", env_prefix, "-f", create_spec_file, "-y", "--json")
     assert res["success"]

@@ -292,9 +282,7 @@ def test_channels(tmp_home, tmp_root_prefix, tmp_path, cli, yaml, env_var, rc_fi
         cmd += ["--rc-file", rc_file]
         expected_channels += ["rc"]

-    res = helpers.create(
-        *cmd, "--print-config-only", no_rc=not rc_file, default_channel=False
-    )
+    res = helpers.create(*cmd, "--print-config-only", no_rc=not rc_file, default_channel=False)
     check_create_result(res, tmp_root_prefix, env_prefix)
     if expected_channels:
         assert res["channels"] == expected_channels

@@ -378,9 +366,7 @@ def test_multiprocessing():
     "already_exists, is_conda_env", ((False, False), (True, False), (True, True))
 )
 @pytest.mark.parametrize("has_specs", (False, True))
-def test_create_base(
-    tmp_home, tmp_root_prefix, already_exists, is_conda_env, has_specs
-):
+def test_create_base(tmp_home, tmp_root_prefix, already_exists, is_conda_env, has_specs):
     if already_exists:
         if is_conda_env:
             (tmp_root_prefix / "conda-meta").mkdir()

@@ -546,9 +532,7 @@ def test_always_yes(tmp_home, tmp_root_prefix, tmp_path, source):
 )
 @pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
 @pytest.mark.parametrize("relocate_prefix", ["/home/bob/env", "/"])
-def test_create_with_relocate_prefix(
-    tmp_home, tmp_root_prefix, tmp_path, relocate_prefix
-):
+def test_create_with_relocate_prefix(tmp_home, tmp_root_prefix, tmp_path, relocate_prefix):
     env_prefix = tmp_path / "myenv"
     res = helpers.create(
         "-p",

@@ -645,7 +629,7 @@ def test_spec_with_channel(tmp_home, tmp_root_prefix, tmp_path):
 def test_spec_with_channel_and_subdir():
     env_name = "myenv"
     try:
-        res = helpers.create("-n", env_name, "conda-forge/noarch::xtensor", "--dry-run")
+        helpers.create("-n", env_name, "conda-forge/noarch::xtensor", "--dry-run")
     except subprocess.CalledProcessError as e:
         assert e.stderr.decode() == (
             'critical libmamba The package "conda-forge/noarch::xtensor" is '

@@ -719,9 +703,7 @@ def test_pin_applicable(tmp_home, tmp_root_prefix, tmp_path):
     with open(rc_file, "w+") as f:
         f.write(f"""pinned_packages: ["{pin_name}<={pin_max_version}"]""")

-    res = helpers.create(
-        "-n", "myenv", f"--rc-file={rc_file}", "--json", spec_name, no_rc=False
-    )
+    res = helpers.create("-n", "myenv", f"--rc-file={rc_file}", "--json", spec_name, no_rc=False)

     install_pkg = None
     for p in res["actions"]["LINK"]:

@@ -741,9 +723,7 @@ def test_pin_not_applicable(tmp_home, tmp_root_prefix, tmp_path):
     with open(rc_file, "w+") as f:
         f.write(f"""pinned_packages: ["{pin_name}"]""")

-    res = helpers.create(
-        "-n", "myenv", f"--rc-file={rc_file}", "--json", spec_name, no_rc=False
-    )
+    res = helpers.create("-n", "myenv", f"--rc-file={rc_file}", "--json", spec_name, no_rc=False)
     assert res["success"] is True
     helpers.get_concrete_pkg(res, spec_name)  # Not trowing

@@ -829,9 +809,7 @@ def test_create_check_dirs(tmp_home, tmp_root_prefix):
     if platform.system() == "Windows":
         assert os.path.isdir(env_prefix / "lib" / "site-packages" / "traitlets")
     else:
-        assert os.path.isdir(
-            env_prefix / "lib" / "python3.8" / "site-packages" / "traitlets"
-        )
+        assert os.path.isdir(env_prefix / "lib" / "python3.8" / "site-packages" / "traitlets")


 @pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)

@@ -843,9 +821,7 @@ def test_requires_pip_install(tmp_home, tmp_root_prefix, env_file):

 @pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
 @pytest.mark.parametrize("env_file", env_files)
-def test_requires_pip_install_prefix_spaces(
-    tmp_home, tmp_root_prefix, tmp_path, env_file
-):
+def test_requires_pip_install_prefix_spaces(tmp_home, tmp_root_prefix, tmp_path, env_file):
     env_prefix = tmp_path / "prefix with space"
     cmd = ["-p", env_prefix, "-f", env_file]
     helpers.create(*cmd)

@@ -886,9 +862,7 @@ def test_pre_commit_compat(tmp_home, tmp_root_prefix, tmp_path):
         helpers.subprocess_run("git", "config", "user.name", "test", cwd=path)
         helpers.subprocess_run("git", "add", ".", cwd=path)
         helpers.subprocess_run("git", "commit", "-m", "Initialize repo", cwd=path)
-        return helpers.subprocess_run(
-            "git", "rev-parse", "HEAD", cwd=path, text=True
-        ).strip()
+        return helpers.subprocess_run("git", "rev-parse", "HEAD", cwd=path, text=True).strip()

     hook_repo = tmp_path / "hook_repo"
     caller_repo = tmp_path / "caller_repo"

@@ -994,16 +968,12 @@ def copy_channels_osx():
         )
         with open(__this_dir__ / f"channel_{channel}/osx-64/repodata.json") as f:
             repodata = f.read()
-        with open(
-            __this_dir__ / f"channel_{channel}/osx-64/repodata.json", "w"
-        ) as f:
+        with open(__this_dir__ / f"channel_{channel}/osx-64/repodata.json", "w") as f:
             repodata = repodata.replace("linux", "osx")
             f.write(repodata)


-def test_dummy_create(
-    add_glibc_virtual_package, copy_channels_osx, tmp_home, tmp_root_prefix
-):
+def test_dummy_create(add_glibc_virtual_package, copy_channels_osx, tmp_home, tmp_root_prefix):
     env_name = "myenv"

     channels = [

@@ -1022,8 +992,7 @@ def test_dummy_create(
     res = helpers.create_with_chan_pkg(env_name, channels, package)

     assert any(
-        link["name"] == "b" and "channel_a" in link["channel"]
-        for link in res["actions"]["LINK"]
+        link["name"] == "b" and "channel_a" in link["channel"] for link in res["actions"]["LINK"]
     )

     channels = channels[::-1]

@@ -1052,15 +1021,13 @@ def test_create_dry_run(tmp_home, tmp_root_prefix, use_json):

 def test_create_with_non_existing_subdir(tmp_home, tmp_root_prefix, tmp_path):
     env_prefix = tmp_path / "myprefix"
-    with pytest.raises(subprocess.CalledProcessError) as e:
-        helpers.create(
-            "-p", env_prefix, "--dry-run", "--json", f"conda-forge/noarch::xtensor"
-        )
+    with pytest.raises(subprocess.CalledProcessError):
+        helpers.create("-p", env_prefix, "--dry-run", "--json", "conda-forge/noarch::xtensor")


 def test_create_with_multiple_files(tmp_home, tmp_root_prefix, tmpdir):
     env_name = "myenv"
-    env_prefix = tmp_root_prefix / "envs" / env_name
+    tmp_root_prefix / "envs" / env_name

     # Check that multiple --file arguments are considered
     (tmpdir / "file_a.txt").write(b"a")

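The assignments removed here (`env_prefix = tmp_root_prefix / "envs" / env_name` becoming a bare expression) are Ruff's F841 (local variable assigned but never used). Note that only the dead binding is dropped while the right-hand expression is kept, presumably to preserve any side effects; since `/` on a `pathlib.Path` merely builds a path object, the whole statement could arguably have been deleted:

    # Before: F841 -- `env_prefix` is never read afterwards
    env_prefix = tmp_root_prefix / "envs" / env_name

    # After: binding dropped, expression kept
    tmp_root_prefix / "envs" / env_name
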
@@ -1097,7 +1064,7 @@ multichannel_config = {

 def test_create_with_multi_channels(tmp_home, tmp_root_prefix, tmp_path):
     env_name = "myenv"
-    env_prefix = tmp_root_prefix / "envs" / env_name
+    tmp_root_prefix / "envs" / env_name

     rc_file = tmp_path / "config.yaml"
     rc_file.write_text(yaml.dump(multichannel_config))

@@ -1119,17 +1086,15 @@ def test_create_with_multi_channels(tmp_home, tmp_root_prefix, tmp_path):
         assert pkg["url"].startswith("https://conda.anaconda.org/conda-forge/")


-def test_create_with_multi_channels_and_non_existing_subdir(
-    tmp_home, tmp_root_prefix, tmp_path
-):
+def test_create_with_multi_channels_and_non_existing_subdir(tmp_home, tmp_root_prefix, tmp_path):
     env_name = "myenv"
-    env_prefix = tmp_root_prefix / "envs" / env_name
+    tmp_root_prefix / "envs" / env_name

     rc_file = tmp_path / "config.yaml"
     rc_file.write_text(yaml.dump(multichannel_config))

-    with pytest.raises(subprocess.CalledProcessError) as e:
-        res = helpers.create(
+    with pytest.raises(subprocess.CalledProcessError):
+        helpers.create(
             "-n",
             env_name,
             "conda-forge2/noarch::xtensor",

@@ -84,9 +84,7 @@ def test_env_export(export_env, explicit_flag, md5_flag, channel_subdir_flag):
         assert set(ret["channels"]) == {"conda-forge"}
         assert "micromamba=0.24.0=0" in str(ret["dependencies"])
         if md5_flag == "--md5":
-            assert re.search(
-                r"micromamba=0.24.0=0\[md5=[a-f0-9]{32}\]", str(ret["dependencies"])
-            )
+            assert re.search(r"micromamba=0.24.0=0\[md5=[a-f0-9]{32}\]", str(ret["dependencies"]))
         if channel_subdir_flag:
             assert re.search(
                 r"conda-forge/[a-z0-9-]+::micromamba=0.24.0=0", str(ret["dependencies"])
@@ -141,17 +139,13 @@ def test_env_update(tmp_home, tmp_root_prefix, tmp_path, prune):
     env_name = "env-create-update"

     # Create env with python=3.6.15 and xtensor=0.20.0
-    helpers.create(
-        "python=3.6.15", "xtensor=0.20.0", "-n", env_name, "--json", no_dry_run=True
-    )
+    helpers.create("python=3.6.15", "xtensor=0.20.0", "-n", env_name, "--json", no_dry_run=True)
     packages = helpers.umamba_list("-n", env_name, "--json")
     assert any(
-        package["name"] == "python" and package["version"] == "3.6.15"
-        for package in packages
+        package["name"] == "python" and package["version"] == "3.6.15" for package in packages
     )
     assert any(
-        package["name"] == "xtensor" and package["version"] == "0.20.0"
-        for package in packages
+        package["name"] == "xtensor" and package["version"] == "0.20.0" for package in packages
     )
     assert any(package["name"] == "xtl" for package in packages)

@@ -176,8 +170,7 @@ def test_env_update(tmp_home, tmp_root_prefix, tmp_path, prune):
         assert not any(package["name"] == "xtl" for package in packages)
     else:
         assert any(
-            package["name"] == "xtensor" and package["version"] == "0.20.0"
-            for package in packages
+            package["name"] == "xtensor" and package["version"] == "0.20.0" for package in packages
         )
         assert any(package["name"] == "xtl" for package in packages)

@@ -195,9 +188,9 @@ def test_explicit_export_topologically_sorted(tmp_home, tmp_prefix):
         "pip": 0,
         "jupyterlab": 0,
     }
-    for i, l in enumerate(lines):
+    for i, line in enumerate(lines):
         for pkg in indices.keys():
-            if pkg in l:
+            if pkg in line:
                 indices[pkg] = i

     assert indices["libzlib"] < indices["python"]
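
The loop-variable rename in the hunk above (`l` becomes `line`, and later `to_link`, `seq`, and `names` in other files) comes from Ruff's ambiguous-variable-name rule E741, which rejects `l`, `I`, and `O` because they are easily confused with `1` and `0`. A minimal sketch of the pattern, with illustrative data:

    lines = ["libzlib 1.2.13", "python 3.11.0", "pip 23.0"]

    # Flagged by Ruff E741 (ambiguous variable name):
    #     for i, l in enumerate(lines): ...
    # Fixed by renaming, exactly as in the diff:
    for i, line in enumerate(lines):
        if "python" in line:
            print(i, line)
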
@@ -6,15 +6,17 @@ from pathlib import Path

 import pytest

-from .helpers import *
+# Need to import everything to get fixtures
+from .helpers import *  # noqa: F403
+from . import helpers


 class TestInstall:
     current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
     current_prefix = os.environ["CONDA_PREFIX"]

-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
+    env_name = helpers.random_string()
+    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
     prefix = os.path.join(root_prefix, "envs", env_name)

     @classmethod
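
The rewritten import block above is the recurring pattern of this commit: the star import stays because pytest only discovers fixtures that are present in the test module's namespace, while the added `from . import helpers` lets ordinary helper calls be qualified so Ruff can check them as defined names; `# noqa: F403` silences Ruff's star-import warning on the one line where it is intentional. A sketch of the two-module layout this assumes (file, fixture, and helper names illustrative):

    # tests/helpers.py (illustrative)
    import pytest

    @pytest.fixture
    def existing_cache(tmp_path):
        return tmp_path / "pkgs"

    def random_string():
        return "abc123"

    # tests/test_something.py (illustrative)
    from .helpers import *  # noqa: F403  -- brings fixtures into scope for pytest
    from . import helpers  # qualified access for ordinary helpers

    def test_uses_both(existing_cache):  # fixture resolved via the star import
        assert helpers.random_string()  # helper resolved via the module import
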
@@ -24,7 +26,7 @@ class TestInstall:

     @classmethod
     def setup_method(cls):
-        create("-n", TestInstall.env_name, "--offline", no_dry_run=True)
+        helpers.create("-n", TestInstall.env_name, "--offline", no_dry_run=True)

     @classmethod
     def teardown_class(cls):
@@ -41,7 +43,7 @@ class TestInstall:
                 os.environ.pop(v)

         if Path(TestInstall.prefix).exists():
-            rmtree(TestInstall.prefix)
+            helpers.rmtree(TestInstall.prefix)

     @classmethod
     def config_tests(cls, res, root_prefix=root_prefix, target_prefix=prefix):
@@ -49,10 +51,10 @@ class TestInstall:
         assert res["target_prefix"] == target_prefix
         assert res["use_target_prefix_fallback"]
         checks = (
-            MAMBA_ALLOW_EXISTING_PREFIX
-            | MAMBA_NOT_ALLOW_MISSING_PREFIX
-            | MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
-            | MAMBA_EXPECT_EXISTING_PREFIX
+            helpers.MAMBA_ALLOW_EXISTING_PREFIX
+            | helpers.MAMBA_NOT_ALLOW_MISSING_PREFIX
+            | helpers.MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
+            | helpers.MAMBA_EXPECT_EXISTING_PREFIX
         )
         assert res["target_prefix_checks"] == checks

@@ -77,7 +79,7 @@ class TestInstall:
         cmd = list(specs)

         if source in ("spec_file_only", "both"):
-            f_name = random_string()
+            f_name = helpers.random_string()
             spec_file = os.path.join(TestInstall.root_prefix, f_name)

             if file_type == "classic":
@@ -86,10 +88,8 @@ class TestInstall:
             elif file_type == "explicit":
                 channel = "https://conda.anaconda.org/conda-forge/linux-64/"
                 explicit_specs = [
-                    channel
-                    + "xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887",
-                    channel
-                    + "xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f",
+                    channel + "xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887",
+                    channel + "xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f",
                 ]
                 file_content = ["@EXPLICIT"] + explicit_specs
                 specs = explicit_specs
@@ -103,7 +103,7 @@ class TestInstall:

             cmd += ["-f", spec_file]

-        res = install(*cmd, "--print-config-only")
+        res = helpers.install(*cmd, "--print-config-only")

         TestInstall.config_tests(res)
         assert res["env_name"] == ""
@@ -130,9 +130,7 @@ class TestInstall:
         cmd = []

         if root_prefix in (None, "cli"):
-            os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop(
-                "MAMBA_ROOT_PREFIX"
-            )
+            os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop("MAMBA_ROOT_PREFIX")

         if root_prefix == "cli":
             cmd += ["-r", TestInstall.root_prefix]
@@ -155,7 +153,7 @@ class TestInstall:
             cmd += ["-n", n]

         if yaml_name:
-            f_name = random_string() + ".yaml"
+            f_name = helpers.random_string() + ".yaml"
             spec_file = os.path.join(TestInstall.prefix, f_name)

             if yaml_name == "prefix":
@@ -188,9 +186,9 @@ class TestInstall:
             or not (cli_prefix or cli_env_name or yaml_name or env_var or fallback)
         ):
             with pytest.raises(subprocess.CalledProcessError):
-                install(*cmd, "--print-config-only")
+                helpers.install(*cmd, "--print-config-only")
         else:
-            res = install(*cmd, "--print-config-only")
+            res = helpers.install(*cmd, "--print-config-only")
             TestInstall.config_tests(res, root_prefix=r, target_prefix=expected_p)

     @pytest.mark.parametrize("cli", (False, True))
@@ -206,7 +204,7 @@ class TestInstall:
             expected_channels += ["cli"]

         if yaml:
-            f_name = random_string() + ".yaml"
+            f_name = helpers.random_string() + ".yaml"
             spec_file = os.path.join(TestInstall.prefix, f_name)

             file_content = [
@@ -224,7 +222,7 @@ class TestInstall:
             expected_channels += ["env_var"]

         if rc_file:
-            f_name = random_string() + ".yaml"
+            f_name = helpers.random_string() + ".yaml"
             rc_file = os.path.join(TestInstall.prefix, f_name)

             file_content = ["channels: [rc]"]
@@ -234,9 +232,7 @@ class TestInstall:
             cmd += ["--rc-file", rc_file]
             expected_channels += ["rc"]

-        res = install(
-            *cmd, "--print-config-only", no_rc=not rc_file, default_channel=False
-        )
+        res = helpers.install(*cmd, "--print-config-only", no_rc=not rc_file, default_channel=False)
         TestInstall.config_tests(res)
         if expected_channels:
             assert res["channels"] == expected_channels
@@ -249,14 +245,12 @@ class TestInstall:
         specs = ["xtensor", "xsimd"]
         channel = "https://conda.anaconda.org/conda-forge/linux-64/"
         explicit_specs = [
-            channel
-            + "xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887",
-            channel
-            + "linux-64/xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f",
+            channel + "xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887",
+            channel + "linux-64/xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f",
         ]

         for i in range(2):
-            f_name = random_string()
+            f_name = helpers.random_string()
             file = os.path.join(TestInstall.prefix, f_name)

             if type == "yaml":
@@ -264,7 +258,6 @@ class TestInstall:
                 file_content = [f"dependencies: [{specs[i]}]"]
             elif type == "classic":
                 file_content = [specs[i]]
-                expected_specs = specs
             else:  # explicit
                 file_content = ["@EXPLICIT", explicit_specs[i]]

@@ -275,9 +268,9 @@ class TestInstall:

         if type == "yaml":
             with pytest.raises(subprocess.CalledProcessError):
-                install(*cmd, "--print-config-only")
+                helpers.install(*cmd, "--print-config-only")
         else:
-            res = install(*cmd, "--print-config-only")
+            res = helpers.install(*cmd, "--print-config-only")
             if type == "classic":
                 assert res["specs"] == specs
             else:  # explicit
@@ -286,9 +279,7 @@ class TestInstall:
     @pytest.mark.parametrize("priority", (None, "disabled", "flexible", "strict"))
     @pytest.mark.parametrize("no_priority", (None, True))
     @pytest.mark.parametrize("strict_priority", (None, True))
-    def test_channel_priority(
-        self, priority, no_priority, strict_priority, existing_cache
-    ):
+    def test_channel_priority(self, priority, no_priority, strict_priority, existing_cache):
         cmd = ["-p", TestInstall.prefix, "xtensor"]
         expected_priority = "flexible"

@@ -311,14 +302,14 @@ class TestInstall:
             or (no_priority and strict_priority)
         ):
             with pytest.raises(subprocess.CalledProcessError):
-                install(*cmd, "--print-config-only")
+                helpers.install(*cmd, "--print-config-only")
         else:
-            res = install(*cmd, "--print-config-only")
+            res = helpers.install(*cmd, "--print-config-only")
             assert res["channel_priority"] == expected_priority

     def test_quotes(self, existing_cache):
         cmd = ["-p", f"{TestInstall.prefix}", "xtensor", "--print-config-only"]
-        res = install(*cmd)
+        res = helpers.install(*cmd)
         assert res["target_prefix"] == TestInstall.prefix

     @pytest.mark.parametrize("prefix", ("target", "root"))
@@ -338,27 +329,28 @@ class TestInstall:
             "xtensor",
             "--print-config-only",
         ]
-        res = install(*cmd)
+        res = helpers.install(*cmd)
         assert res["target_prefix"] == TestInstall.prefix
         assert res["root_prefix"] == TestInstall.root_prefix

     def test_empty_specs(self, existing_cache):
-        assert "Nothing to do." in install().strip()
+        assert "Nothing to do." in helpers.install().strip()

     @pytest.mark.skipif(
-        dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
+        helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
+        reason="Running only ultra-dry tests",
     )
     @pytest.mark.parametrize("already_installed", [False, True])
     def test_non_explicit_spec(self, already_installed, existing_cache):
         cmd = ["-p", TestInstall.prefix, "xtensor", "--json"]

         if already_installed:
-            install(*cmd, no_dry_run=True)
+            helpers.install(*cmd, no_dry_run=True)

-        res = install(*cmd)
+        res = helpers.install(*cmd)

         assert res["success"]
-        assert res["dry_run"] == (dry_run_tests == DryRun.DRY)
+        assert res["dry_run"] == (helpers.dry_run_tests == helpers.DryRun.DRY)
         if already_installed:
             keys = {"dry_run", "success", "prefix", "message"}
             assert keys.issubset(set(res.keys()))
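
The skipif reshaping above is the formatter at work: with the line length raised to 100 columns, a call whose arguments no longer fit on one line is exploded to one argument per line, and the trailing comma left behind (the "magic trailing comma", as in Black) keeps it exploded on later runs. A self-contained before/after sketch with a stand-in condition:

    import pytest

    ULTRA_DRY = False  # stand-in for helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY

    # Before: a single argument line.
    #     @pytest.mark.skipif(ULTRA_DRY, reason="Running only ultra-dry tests")
    # After: one argument per line; the trailing comma pins this layout.
    @pytest.mark.skipif(
        ULTRA_DRY,
        reason="Running only ultra-dry tests",
    )
    def test_example():
        assert True
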
@@ -373,15 +365,16 @@ class TestInstall:
         expected_packages = {"xtensor", "xtl"}
         assert expected_packages.issubset(packages)

-        if not dry_run_tests:
-            pkg_name = get_concrete_pkg(res, "xtensor")
-            orig_file_path = get_pkg(
-                pkg_name, xtensor_hpp, TestInstall.current_root_prefix
+        if not helpers.dry_run_tests:
+            pkg_name = helpers.get_concrete_pkg(res, "xtensor")
+            orig_file_path = helpers.get_pkg(
+                pkg_name, helpers.xtensor_hpp, TestInstall.current_root_prefix
             )
             assert orig_file_path.exists()

     @pytest.mark.skipif(
-        dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
+        helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
+        reason="Running only ultra-dry tests",
     )
     @pytest.mark.parametrize("already_installed", [False, True])
     @pytest.mark.parametrize("valid", [False, True])
@@ -400,9 +393,9 @@ class TestInstall:
         cmd = ("-p", TestInstall.prefix, "-q", "-f", spec_file)

         if valid:
-            install(*cmd, default_channel=False)
+            helpers.install(*cmd, default_channel=False)

-            list_res = umamba_list("-p", TestInstall.prefix, "--json")
+            list_res = helpers.umamba_list("-p", TestInstall.prefix, "--json")
             assert len(list_res) == 1
             pkg = list_res[0]
             assert pkg["name"] == "xtensor"
@@ -410,10 +403,11 @@ class TestInstall:
             assert pkg["build_string"] == "hc9558a2_0"
         else:
             with pytest.raises(subprocess.CalledProcessError):
-                install(*cmd, default_channel=False)
+                helpers.install(*cmd, default_channel=False)

     @pytest.mark.skipif(
-        dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
+        helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
+        reason="Running only ultra-dry tests",
     )
     @pytest.mark.parametrize(
         "alias",
@@ -426,22 +420,23 @@ class TestInstall:
     )
     def test_channel_alias(self, alias, existing_cache):
         if alias:
-            res = install("xtensor", "--json", "--channel-alias", alias)
+            res = helpers.install("xtensor", "--json", "--channel-alias", alias)
             ca = alias.rstrip("/")
         else:
-            res = install("xtensor", "--json")
+            res = helpers.install("xtensor", "--json")
             ca = "https://conda.anaconda.org"

-        for l in res["actions"]["LINK"]:
-            assert l["channel"].startswith(f"{ca}/conda-forge/")
-            assert l["url"].startswith(f"{ca}/conda-forge/")
+        for to_link in res["actions"]["LINK"]:
+            assert to_link["channel"].startswith(f"{ca}/conda-forge/")
+            assert to_link["url"].startswith(f"{ca}/conda-forge/")

     @pytest.mark.skipif(
-        dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
+        helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
+        reason="Running only ultra-dry tests",
     )
     def test_no_python_pinning(self, existing_cache):
-        install("python=3.9", no_dry_run=True)
-        res = install("setuptools=28.4.0", "--no-py-pin", "--json")
+        helpers.install("python=3.9", no_dry_run=True)
+        res = helpers.install("setuptools=28.4.0", "--no-py-pin", "--json")

         keys = {"success", "prefix", "actions", "dry_run"}
         assert keys.issubset(set(res.keys()))
@@ -449,9 +444,7 @@ class TestInstall:
         action_keys = {"LINK", "UNLINK", "PREFIX"}
         assert action_keys.issubset(set(res["actions"].keys()))

-        expected_link_packages = (
-            {"python"} if os.name == "nt" else {"python", "python_abi"}
-        )
+        expected_link_packages = {"python"} if os.name == "nt" else {"python", "python_abi"}
         link_packages = {pkg["name"] for pkg in res["actions"]["LINK"]}
         assert expected_link_packages.issubset(link_packages)
         unlink_packages = {pkg["name"] for pkg in res["actions"]["UNLINK"]}
@@ -464,26 +457,28 @@ class TestInstall:
         assert py_pkg["version"].startswith("3.9")

     @pytest.mark.skipif(
-        dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
+        helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
+        reason="Running only ultra-dry tests",
     )
     @pytest.mark.skipif(sys.platform == "win32", reason="Python2 no available")
     def test_python_pinning(self, existing_cache):
         """Black fails to install as it is not available for pinned Python 2."""
-        res = install("python=2", "--json", no_dry_run=True)
+        res = helpers.install("python=2", "--json", no_dry_run=True)
         assert res["success"]
         # We do not have great way to check for the type of error for now
         try:
-            install("black", "--py-pin", "--json")
+            helpers.install("black", "--py-pin", "--json")
             assert False
         except subprocess.CalledProcessError:
             pass

     @pytest.mark.skipif(
-        dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests"
+        helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY,
+        reason="Running only ultra-dry tests",
     )
     def test_freeze_installed(self, existing_cache):
-        install("xtensor=0.20", no_dry_run=True)
-        res = install("xframe", "--freeze-installed", "--json")
+        helpers.install("xtensor=0.20", no_dry_run=True)
+        res = helpers.install("xframe", "--freeze-installed", "--json")

         # without freeze installed, xframe 0.3.0 should be installed and xtensor updated to 0.21
         keys = {"success", "prefix", "actions", "dry_run"}
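
The try/except with a bare assert False in test_python_pinning above is kept as-is by this commit; the idiomatic pytest equivalent, noted here only as a hedged alternative, is pytest.raises, which both asserts that the error occurs and fails cleanly when it does not:

    import subprocess

    import pytest


    def run_cli(*args):
        # Illustrative stand-in for a helper that raises on non-zero exit.
        raise subprocess.CalledProcessError(1, args)


    def test_install_fails():
        # Same effect as try / assert False / except CalledProcessError / pass:
        with pytest.raises(subprocess.CalledProcessError):
            run_cli("install", "black", "--py-pin")
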
@@ -498,9 +493,7 @@ class TestInstall:
         assert res["actions"]["LINK"][0]["version"] == "0.2.0"

     def test_channel_specific(self, existing_cache):
-        res = install(
-            "conda-forge::xtensor", "--json", default_channel=False, no_rc=True
-        )
+        res = helpers.install("conda-forge::xtensor", "--json", default_channel=False, no_rc=True)

         keys = {"success", "prefix", "actions", "dry_run"}
         assert keys.issubset(set(res.keys()))
@@ -516,12 +509,11 @@ class TestInstall:
         assert pkg["channel"].startswith("https://conda.anaconda.org/conda-forge/")

     def test_explicit_noarch(self, existing_cache):
-        install("python", no_dry_run=True)
+        helpers.install("python", no_dry_run=True)

         channel = "https://conda.anaconda.org/conda-forge/noarch/"
         explicit_spec = (
-            channel
-            + "appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b"
+            channel + "appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b"
         )
         file_content = ["@EXPLICIT", explicit_spec]

@@ -531,9 +523,9 @@ class TestInstall:

         cmd = ("-p", TestInstall.prefix, "-q", "-f", spec_file)

-        install(*cmd, default_channel=False)
+        helpers.install(*cmd, default_channel=False)

-        list_res = umamba_list("-p", TestInstall.prefix, "--json")
+        list_res = helpers.umamba_list("-p", TestInstall.prefix, "--json")
         pkgs = [p for p in list_res if p["name"] == "appdirs"]
         assert len(pkgs) == 1
         pkg = pkgs[0]
@@ -541,33 +533,31 @@ class TestInstall:
         assert pkg["build_string"] == "pyh9f0ad1d_0"

     def test_broken_package_name(self):
-        non_existing_url = (
-            "https://026e9ab9-6b46-4285-ae0d-427553801720.de/mypackage.tar.bz2"
-        )
+        non_existing_url = "https://026e9ab9-6b46-4285-ae0d-427553801720.de/mypackage.tar.bz2"
         try:
-            res = install(non_existing_url, default_channel=False)
+            helpers.install(non_existing_url, default_channel=False)
         except subprocess.CalledProcessError as e:
             assert "Invalid package filename" in e.stderr.decode("utf-8")

     def test_no_reinstall(self, existing_cache):
         """Reinstalling is a no op."""
-        res = install("xtensor", "--json")
+        res = helpers.install("xtensor", "--json")
         assert "xtensor" in {pkg["name"] for pkg in res["actions"]["LINK"]}

-        reinstall_res = install("xtensor", "--json")
+        reinstall_res = helpers.install("xtensor", "--json")
         assert "actions" not in reinstall_res

     def test_force_reinstall(self, existing_cache):
         """Force reinstall installs existing package again."""
-        res = install("xtensor", "--json")
+        res = helpers.install("xtensor", "--json")
         assert "xtensor" in {pkg["name"] for pkg in res["actions"]["LINK"]}

-        reinstall_res = install("xtensor", "--force-reinstall", "--json")
+        reinstall_res = helpers.install("xtensor", "--force-reinstall", "--json")
         assert "xtensor" in {pkg["name"] for pkg in reinstall_res["actions"]["LINK"]}

     def test_force_reinstall_not_installed(self, existing_cache):
         """Force reinstall on non-installed packages is valid."""
-        reinstall_res = install("xtensor", "--force-reinstall", "--json")
+        reinstall_res = helpers.install("xtensor", "--force-reinstall", "--json")
         assert "xtensor" in {pkg["name"] for pkg in reinstall_res["actions"]["LINK"]}

@@ -575,13 +565,13 @@ def test_install_check_dirs(tmp_home, tmp_root_prefix):
     env_name = "myenv"
     env_prefix = tmp_root_prefix / "envs" / env_name

-    create("-n", env_name, "python=3.8")
-    res = install("-n", env_name, "nodejs", "--json")
+    helpers.create("-n", env_name, "python=3.8")
+    res = helpers.install("-n", env_name, "nodejs", "--json")

     assert os.path.isdir(env_prefix)
     assert "nodejs" in {pkg["name"] for pkg in res["actions"]["LINK"]}

-    if platform.system() == "Windows":
+    if helpers.platform.system() == "Windows":
         assert os.path.isdir(env_prefix / "lib" / "site-packages")
     else:
         assert os.path.isdir(env_prefix / "lib" / "python3.8" / "site-packages")
@@ -589,12 +579,12 @@ def test_install_check_dirs(tmp_home, tmp_root_prefix):

 def test_track_features(tmp_home, tmp_root_prefix):
     env_name = "myenv"
-    env_prefix = tmp_root_prefix / "envs" / env_name
+    tmp_root_prefix / "envs" / env_name

     # should install CPython since PyPy has track features
     version = "3.7.9"
-    create("-n", env_name, default_channel=False, no_rc=False)
-    install(
+    helpers.create("-n", env_name, default_channel=False, no_rc=False)
+    helpers.install(
         "-n",
         env_name,
         "-q",
@@ -602,20 +592,20 @@ def test_track_features(tmp_home, tmp_root_prefix):
         "--strict-channel-priority",
         no_rc=False,
     )
-    res = umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
-    if platform.system() == "Windows":
+    res = helpers.umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
+    if helpers.platform.system() == "Windows":
         assert res.strip().startswith(version)
         assert "[MSC v." in res.strip()
-    elif platform.system() == "Linux":
+    elif helpers.platform.system() == "Linux":
         assert res.strip().startswith(version)
         assert "[GCC" in res.strip()
     else:
         assert res.strip().startswith(version)
         assert "[Clang" in res.strip()

-    if platform.system() == "Linux":
+    if helpers.platform.system() == "Linux":
         # now force PyPy install
-        install(
+        helpers.install(
             "-n",
             env_name,
             "-q",
@@ -623,9 +613,7 @@ def test_track_features(tmp_home, tmp_root_prefix):
             "--strict-channel-priority",
             no_rc=False,
         )
-        res = umamba_run(
-            "-n", env_name, "python", "-c", "import sys; print(sys.version)"
-        )
+        res = helpers.umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")

         assert res.strip().startswith(version)
         assert "[PyPy" in res.strip()
@@ -633,11 +621,11 @@ def test_track_features(tmp_home, tmp_root_prefix):

 def test_reinstall_with_new_version(tmp_home, tmp_root_prefix):
     env_name = "myenv"
-    env_prefix = tmp_root_prefix / "envs" / env_name
+    tmp_root_prefix / "envs" / env_name

     version = "3.8"
-    create("-n", env_name, default_channel=False, no_rc=False)
-    install(
+    helpers.create("-n", env_name, default_channel=False, no_rc=False)
+    helpers.install(
         "-n",
         env_name,
         "-q",
@@ -646,17 +634,15 @@ def test_reinstall_with_new_version(tmp_home, tmp_root_prefix):
         no_rc=False,
     )

-    res = umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
+    res = helpers.umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
     assert version in res

-    res = umamba_run(
-        "-n", env_name, "python", "-c", "import pip; print(pip.__version__)"
-    )
+    res = helpers.umamba_run("-n", env_name, "python", "-c", "import pip; print(pip.__version__)")
     assert len(res)

     # Update python version
     version = "3.9"
-    install(
+    helpers.install(
         "-n",
         env_name,
         "-q",
@@ -664,10 +650,8 @@ def test_reinstall_with_new_version(tmp_home, tmp_root_prefix):
         no_rc=False,
     )

-    res = umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
+    res = helpers.umamba_run("-n", env_name, "python", "-c", "import sys; print(sys.version)")
     assert version in res

-    res = umamba_run(
-        "-n", env_name, "python", "-c", "import pip; print(pip.__version__)"
-    )
+    res = helpers.umamba_run("-n", env_name, "python", "-c", "import pip; print(pip.__version__)")
     assert len(res)
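
Several hunks above drop a binding such as `env_prefix = ...` in favor of the bare expression `tmp_root_prefix / "envs" / env_name`. That is the fix for Ruff's unused-variable rule F841: the pathlib arithmetic has no side effects, so keeping the expression without the name changes nothing at runtime. A minimal illustration:

    from pathlib import Path

    tmp_root_prefix = Path("/tmp/root")
    env_name = "myenv"

    # F841: local variable `env_prefix` is assigned to but never used
    # env_prefix = tmp_root_prefix / "envs" / env_name

    # The commit's fix keeps the expression and drops the binding (a no-op):
    tmp_root_prefix / "envs" / env_name
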
@@ -1,15 +1,12 @@
-import json
 import os
 import platform
-import random
-import shutil
-import string
-import subprocess
 from pathlib import Path

 import pytest

-from .helpers import *
+# Need to import everything to get fixtures
+from .helpers import *  # noqa: F403
+from . import helpers

 if platform.system() == "Windows":
     xtensor_hpp = "Library/include/xtensor/xtensor.hpp"
@@ -21,8 +18,8 @@ class TestLinking:
     current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
     current_prefix = os.environ["CONDA_PREFIX"]

-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
+    env_name = helpers.random_string()
+    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
     prefix = os.path.join(root_prefix, "envs", env_name)

     @classmethod
@@ -35,17 +32,17 @@ class TestLinking:
         os.environ["CONDA_PREFIX"] = TestLinking.current_prefix

         if Path(TestLinking.root_prefix).exists():
-            rmtree(TestLinking.root_prefix)
+            helpers.rmtree(TestLinking.root_prefix)

     @classmethod
     def teardown_method(cls):
         if Path(TestLinking.prefix).exists():
-            rmtree(TestLinking.prefix)
+            helpers.rmtree(TestLinking.prefix)

     def test_link(self, existing_cache, test_pkg):
-        create("xtensor", "-n", TestLinking.env_name, "--json", no_dry_run=True)
+        helpers.create("xtensor", "-n", TestLinking.env_name, "--json", no_dry_run=True)

-        linked_file = get_env(TestLinking.env_name, xtensor_hpp)
+        linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
         assert linked_file.exists()
         assert not linked_file.is_symlink()

@@ -54,7 +51,7 @@ class TestLinking:
         assert cache_file.stat().st_ino == linked_file.stat().st_ino

     def test_copy(self, existing_cache, test_pkg):
-        create(
+        helpers.create(
             "xtensor",
             "-n",
             TestLinking.env_name,
@@ -62,7 +59,7 @@ class TestLinking:
             "--always-copy",
             no_dry_run=True,
         )
-        linked_file = get_env(TestLinking.env_name, xtensor_hpp)
+        linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
         assert linked_file.exists()
         assert not linked_file.is_symlink()

@@ -75,7 +72,7 @@ class TestLinking:
         reason="Softlinking needs admin privileges on win",
     )
     def test_always_softlink(self, existing_cache, test_pkg):
-        create(
+        helpers.create(
             "xtensor",
             "-n",
             TestLinking.env_name,
@@ -83,7 +80,7 @@ class TestLinking:
             "--always-softlink",
             no_dry_run=True,
         )
-        linked_file = get_env(TestLinking.env_name, xtensor_hpp)
+        linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)

         assert linked_file.exists()
         assert linked_file.is_symlink()
@@ -105,15 +102,13 @@ class TestLinking:
             create_args.append("--allow-softlinks")
         if always_copy:
             create_args.append("--always-copy")
-        create(*create_args, no_dry_run=True)
+        helpers.create(*create_args, no_dry_run=True)

-        same_device = (
-            existing_cache.stat().st_dev == Path(TestLinking.prefix).stat().st_dev
-        )
+        same_device = existing_cache.stat().st_dev == Path(TestLinking.prefix).stat().st_dev
         is_softlink = not same_device and allow_softlinks and not always_copy
         is_hardlink = same_device and not always_copy

-        linked_file = get_env(TestLinking.env_name, xtensor_hpp)
+        linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
         assert linked_file.exists()

         cache_file = existing_cache / test_pkg / xtensor_hpp
@@ -122,16 +117,14 @@ class TestLinking:
         assert linked_file.is_symlink() == is_softlink

     def test_unlink_missing_file(self):
-        create("xtensor", "-n", TestLinking.env_name, "--json", no_dry_run=True)
+        helpers.create("xtensor", "-n", TestLinking.env_name, "--json", no_dry_run=True)

-        linked_file = get_env(TestLinking.env_name, xtensor_hpp)
+        linked_file = helpers.get_env(TestLinking.env_name, xtensor_hpp)
         assert linked_file.exists()
         assert not linked_file.is_symlink()

         os.remove(linked_file)
-        remove("xtensor", "-n", TestLinking.env_name)
+        helpers.remove("xtensor", "-n", TestLinking.env_name)

     def test_link_missing_scripts_dir(self):  # issue 2808
-        create(
-            "python=3.7", "pypy", "-n", TestLinking.env_name, "--json", no_dry_run=True
-        )
+        helpers.create("python=3.7", "pypy", "-n", TestLinking.env_name, "--json", no_dry_run=True)
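
The linking assertions above rest on two filesystem facts: a hardlink shares its inode with the cached file, so st_ino equality identifies it, while a symlink is detected with is_symlink(). A self-contained POSIX sketch of both checks:

    import os
    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as d:
        cache_file = Path(d) / "cache_file"
        cache_file.write_text("data")

        hard = Path(d) / "hardlink"
        os.link(cache_file, hard)
        assert hard.stat().st_ino == cache_file.stat().st_ino  # same inode -> hardlink
        assert not hard.is_symlink()

        soft = Path(d) / "softlink"
        soft.symlink_to(cache_file)
        assert soft.is_symlink()
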
@@ -8,9 +8,7 @@ from . import helpers
 @pytest.mark.parametrize("quiet_flag", ["", "-q", "--quiet"])
 @pytest.mark.parametrize("env_selector", ["", "name", "prefix"])
 @pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
-def test_list(
-    tmp_home, tmp_root_prefix, tmp_env_name, tmp_xtensor_env, env_selector, quiet_flag
-):
+def test_list(tmp_home, tmp_root_prefix, tmp_env_name, tmp_xtensor_env, env_selector, quiet_flag):
     if env_selector == "prefix":
         res = helpers.umamba_list("-p", tmp_xtensor_env, "--json", quiet_flag)
     elif env_selector == "name":
@@ -209,9 +209,7 @@ env_file_content = """


 @pytest.mark.parametrize("user,password", [["testuser", "xyzpass"]])
-def test_basic_auth_explicit_txt(
-    auth_file, user, password, basic_auth_server, tmp_path
-):
+def test_basic_auth_explicit_txt(auth_file, user, password, basic_auth_server, tmp_path):
     login(basic_auth_server, "--username", user, "--password", password)

     env_file = tmp_path / "environment.txt"
@@ -224,9 +222,7 @@ def test_basic_auth_explicit_txt(


 @pytest.mark.parametrize("user,password", [["testuser", "xyzpass"]])
-def test_basic_auth_explicit_yaml(
-    auth_file, user, password, basic_auth_server, tmp_path
-):
+def test_basic_auth_explicit_yaml(auth_file, user, password, basic_auth_server, tmp_path):
     login(basic_auth_server, "--username", user, "--password", password)

     env_file = tmp_path / "environment.yml"
@@ -5,7 +5,7 @@ from pathlib import Path

 import pytest

-from .helpers import create, get_env, get_umamba, random_string, remove, umamba_list
+from .helpers import create, random_string, remove

 if sys.platform.startswith("win"):
     import menuinst
@@ -44,9 +44,7 @@ class TestMenuinst:
         assert shortcut.TargetPath.lower() == os.getenv("COMSPEC").lower()
         icon_location = shortcut.IconLocation
         icon_location_path, icon_location_index = icon_location.split(",")
-        assert Path(icon_location_path) == (
-            Path(prefix) / "Menu" / "console_shortcut.ico"
-        )
+        assert Path(icon_location_path) == (Path(prefix) / "Menu" / "console_shortcut.ico")
         assert icon_location_index == "0"

         assert shortcut.Description == "Miniforge Prompt (" + env_name + ")"
@@ -82,9 +80,7 @@ class TestMenuinst:

         icon_location = shortcut.IconLocation
         icon_location_path, icon_location_index = icon_location.split(",")
-        assert Path(icon_location_path) == (
-            Path(prefix) / "Menu" / "console_shortcut.ico"
-        )
+        assert Path(icon_location_path) == (Path(prefix) / "Menu" / "console_shortcut.ico")
         assert icon_location_index == "0"

         assert shortcut.Description == "Miniforge Prompt (" + env_name + ")"
@@ -10,7 +10,7 @@ import pytest
 import zstandard
 from conda_package_handling import api as cph

-from .helpers import *
+from . import helpers


 @pytest.fixture
@@ -37,7 +37,7 @@ def test_extract(cph_test_file: Path, tmp_path: Path):
     shutil.copy(cph_test_file, tmp_path / "mm")
     shutil.copy(cph_test_file, tmp_path / "cph")

-    mamba_exe = get_umamba()
+    mamba_exe = helpers.get_umamba()
     subprocess.call(
         [
             mamba_exe,
@@ -52,21 +52,13 @@ def test_extract(cph_test_file: Path, tmp_path: Path):
         dest_dir=str(tmp_path / "cph" / "cph_test_data-0.0.1-0"),
     )

-    conda = set(
-        (p.relative_to(tmp_path / "cph") for p in (tmp_path / "cph").rglob("**/*"))
-    )
-    mamba = set(
-        (p.relative_to(tmp_path / "mm") for p in (tmp_path / "mm").rglob("**/*"))
-    )
+    conda = set((p.relative_to(tmp_path / "cph") for p in (tmp_path / "cph").rglob("**/*")))
+    mamba = set((p.relative_to(tmp_path / "mm") for p in (tmp_path / "mm").rglob("**/*")))
     assert conda == mamba

     extracted = cph_test_file.name.removesuffix(".tar.bz2")
     fcmp = filecmp.dircmp(tmp_path / "cph" / extracted, tmp_path / "mm" / extracted)
-    assert (
-        len(fcmp.left_only) == 0
-        and len(fcmp.right_only) == 0
-        and len(fcmp.diff_files) == 0
-    )
+    assert len(fcmp.left_only) == 0 and len(fcmp.right_only) == 0 and len(fcmp.diff_files) == 0
     # fcmp.report_full_closure()


@@ -101,8 +93,8 @@ def compare_two_tarfiles(tar1, tar2):
             assert m1.linkname == m2.linkname


-def assert_sorted(l):
-    assert l == sorted(l)
+def assert_sorted(seq):
+    assert seq == sorted(seq)


 def test_extract_compress(cph_test_file: Path, tmp_path: Path):
@@ -110,7 +102,7 @@ def test_extract_compress(cph_test_file: Path, tmp_path: Path):

     shutil.copy(cph_test_file, tmp_path / "mm")

-    mamba_exe = get_umamba()
+    mamba_exe = helpers.get_umamba()
     out = tmp_path / "mm" / "out"
     subprocess.call(
         [
@@ -131,9 +123,7 @@ def test_extract_compress(cph_test_file: Path, tmp_path: Path):
         ]
     )

-    compare_two_tarfiles(
-        tarfile.open(cph_test_file), tarfile.open(tmp_path / "mm" / "out.tar.bz2")
-    )
+    compare_two_tarfiles(tarfile.open(cph_test_file), tarfile.open(tmp_path / "mm" / "out.tar.bz2"))

     fout = tarfile.open(tmp_path / "mm" / "out.tar.bz2")
     names = fout.getnames()
@@ -155,10 +145,8 @@ def test_transmute(cph_test_file: Path, tmp_path: Path):
     shutil.copy(cph_test_file, tmp_path)
     shutil.copy(tmp_path / cph_test_file.name, tmp_path / "mm")

-    mamba_exe = get_umamba()
-    subprocess.call(
-        [mamba_exe, "package", "transmute", str(tmp_path / "mm" / cph_test_file.name)]
-    )
+    mamba_exe = helpers.get_umamba()
+    subprocess.call([mamba_exe, "package", "transmute", str(tmp_path / "mm" / cph_test_file.name)])
     failed_files = cph.transmute(
         str(tmp_path / cph_test_file.name), ".conda", out_folder=str(tmp_path / "cph")
     )
@@ -169,27 +157,23 @@ def test_transmute(cph_test_file: Path, tmp_path: Path):
     cph.extract(str(tmp_path / "cph" / as_conda))
     cph.extract(str(tmp_path / "mm" / as_conda))

-    conda = list((tmp_path / "cph").rglob("**/*"))
-    mamba = list((tmp_path / "mm").rglob("**/*"))
+    list((tmp_path / "cph").rglob("**/*"))
+    list((tmp_path / "mm").rglob("**/*"))

     fcmp = filecmp.dircmp(
         tmp_path / "cph" / "cph_test_data-0.0.1-0",
         tmp_path / "mm" / "cph_test_data-0.0.1-0",
     )
-    assert (
-        len(fcmp.left_only) == 0
-        and len(fcmp.right_only) == 0
-        and len(fcmp.diff_files) == 0
-    )
+    assert len(fcmp.left_only) == 0 and len(fcmp.right_only) == 0 and len(fcmp.diff_files) == 0
     # fcmp.report_full_closure()

     # extract zipfile
     with zipfile.ZipFile(tmp_path / "mm" / as_conda, "r") as zip_ref:
-        l = zip_ref.namelist()
+        names = zip_ref.namelist()

-        assert l[2].startswith("info-")
-        assert l[0] == "metadata.json"
-        assert l[1].startswith("pkg-")
+        assert names[2].startswith("info-")
+        assert names[0] == "metadata.json"
+        assert names[1].startswith("pkg-")

         zip_ref.extractall(tmp_path / "mm" / "zipcontents")

@@ -85,9 +85,7 @@ def tmp_cache_xtensor_hpp(tmp_cache_xtensor_dir: Path) -> Path:


 class TestPkgCache:
-    def test_extracted_file_deleted(
-        self, tmp_home, tmp_cache_xtensor_hpp, tmp_root_prefix
-    ):
+    def test_extracted_file_deleted(self, tmp_home, tmp_cache_xtensor_hpp, tmp_root_prefix):
         old_ino = tmp_cache_xtensor_hpp.stat().st_ino
         os.remove(tmp_cache_xtensor_hpp)

@@ -232,17 +230,13 @@ class TestPkgCache:
 @pytest.fixture
 def tmp_cache_alt(tmp_root_prefix: Path, tmp_shared_cache_xtensor: Path) -> Path:
     """Make an alternative package cache outside the root prefix."""
-    cache = (
-        tmp_root_prefix / "more-pkgs"
-    )  # Creating under root prefix to leverage eager cleanup
+    cache = tmp_root_prefix / "more-pkgs"  # Creating under root prefix to leverage eager cleanup
     shutil.copytree(tmp_shared_cache_xtensor, cache, dirs_exist_ok=True)
     return cache


 def repodata_json(cache: Path) -> set[Path]:
-    return set((cache / "cache").glob("*.json")) - set(
-        (cache / "cache").glob("*.state.json")
-    )
+    return set((cache / "cache").glob("*.json")) - set((cache / "cache").glob("*.state.json"))


 def repodata_solv(cache: Path) -> set[Path]:
@@ -250,15 +244,11 @@ def repodata_solv(cache: Path) -> set[Path]:


 def same_repodata_json_solv(cache: Path):
-    return {p.stem for p in repodata_json(cache)} == {
-        p.stem for p in repodata_solv(cache)
-    }
+    return {p.stem for p in repodata_json(cache)} == {p.stem for p in repodata_solv(cache)}


 class TestMultiplePkgCaches:
-    @pytest.mark.parametrize(
-        "cache", (pytest.lazy_fixture(("tmp_cache", "tmp_cache_alt")))
-    )
+    @pytest.mark.parametrize("cache", (pytest.lazy_fixture(("tmp_cache", "tmp_cache_alt"))))
     def test_different_caches(self, tmp_home, tmp_root_prefix, cache):
         os.environ["CONDA_PKGS_DIRS"] = f"{cache}"
         env_name = "some_env"
@@ -312,12 +302,8 @@ class TestMultiplePkgCaches:

         helpers.create("-n", "myenv", "xtensor", "--json", no_dry_run=True)

-    def test_no_writable_extracted_dir_corrupted(
-        self, tmp_home, tmp_root_prefix, tmp_cache
-    ):
-        (
-            tmp_cache / find_pkg_build(tmp_cache, "xtensor") / helpers.xtensor_hpp
-        ).unlink()
+    def test_no_writable_extracted_dir_corrupted(self, tmp_home, tmp_root_prefix, tmp_cache):
+        (tmp_cache / find_pkg_build(tmp_cache, "xtensor") / helpers.xtensor_hpp).unlink()
         helpers.recursive_chmod(tmp_cache, 0o500)

         os.environ["CONDA_PKGS_DIRS"] = f"{tmp_cache}"
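
pytest.lazy_fixture in the reformatted parametrize above comes from the pytest-lazy-fixture plugin; it defers the parameter value to fixture resolution time, so one test body runs once per cache fixture. A hedged sketch assuming the plugin is installed (fixture names illustrative):

    import pytest  # pytest.lazy_fixture is provided by the pytest-lazy-fixture plugin


    @pytest.fixture
    def tmp_cache(tmp_path):
        return tmp_path / "pkgs"


    @pytest.fixture
    def tmp_cache_alt(tmp_path):
        return tmp_path / "more-pkgs"


    @pytest.mark.parametrize("cache", pytest.lazy_fixture(("tmp_cache", "tmp_cache_alt")))
    def test_uses_either_cache(cache):
        assert cache.name in {"pkgs", "more-pkgs"}
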
@@ -1,28 +1,29 @@
-import json
 import os
 import platform
-import random
 import shutil
-import string
 import subprocess
 import time
 from pathlib import Path

 import pytest

-from .helpers import *
+# Need to import everything to get fixtures
+from .helpers import *  # noqa: F403
+from . import helpers

 __this_dir__ = Path(__file__).parent.resolve()


-@pytest.mark.skipif(dry_run_tests == DryRun.ULTRA_DRY, reason="Running ultra dry tests")
+@pytest.mark.skipif(
+    helpers.dry_run_tests == helpers.DryRun.ULTRA_DRY, reason="Running ultra dry tests"
+)
 class TestRemove:
     current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
     current_prefix = os.environ["CONDA_PREFIX"]
     cache = os.path.join(current_root_prefix, "pkgs")

-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
+    env_name = helpers.random_string()
+    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
     prefix = os.path.join(root_prefix, "envs", env_name)

     @staticmethod
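The import hunk above is the pattern this commit applies to every test module: the star import stays so that pytest fixtures defined in the helpers module remain in scope for collection, while everything else is called through the `helpers` namespace so Ruff can track the names. A minimal sketch of the same layout, with hypothetical fixture and helper names (not taken from this diff):

    # helpers.py (illustrative)
    import random
    import string

    import pytest


    @pytest.fixture
    def tmp_env_name():
        # Fixtures must be importable into the test module for pytest to see them.
        return "env-" + "".join(random.choices(string.ascii_lowercase, k=8))


    def random_string(n: int = 8) -> str:
        # Plain helper, meant to be called as helpers.random_string().
        return "".join(random.choices(string.ascii_lowercase, k=n))


    # test_something.py (illustrative)
    from .helpers import *  # noqa: F403  (brings fixtures into scope)
    from . import helpers


    def test_env_name(tmp_env_name):
        assert tmp_env_name.startswith("env-")
        assert len(helpers.random_string()) == 8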
@@ -30,8 +31,8 @@ class TestRemove:
     def root(existing_cache):
         os.environ["MAMBA_ROOT_PREFIX"] = TestRemove.root_prefix
         os.environ["CONDA_PREFIX"] = TestRemove.prefix
-        create("-n", "base", no_dry_run=True)
-        create("xtensor", "-n", TestRemove.env_name, no_dry_run=True)
+        helpers.create("-n", "base", no_dry_run=True)
+        helpers.create("xtensor", "-n", TestRemove.env_name, no_dry_run=True)

         yield

@@ -42,19 +43,19 @@ class TestRemove:
     @staticmethod
     @pytest.fixture
     def env_created(root):
-        if dry_run_tests == DryRun.OFF:
-            install("xtensor", "-n", TestRemove.env_name)
+        if helpers.dry_run_tests == helpers.DryRun.OFF:
+            helpers.install("xtensor", "-n", TestRemove.env_name)

     @pytest.mark.parametrize("env_selector", ["", "name", "prefix"])
     def test_remove(self, env_selector, env_created):
-        env_pkgs = [p["name"] for p in umamba_list("-p", TestRemove.prefix, "--json")]
+        env_pkgs = [p["name"] for p in helpers.umamba_list("-p", TestRemove.prefix, "--json")]

         if env_selector == "prefix":
-            res = remove("xtensor", "-p", TestRemove.prefix, "--json")
+            res = helpers.remove("xtensor", "-p", TestRemove.prefix, "--json")
         elif env_selector == "name":
-            res = remove("xtensor", "-n", TestRemove.env_name, "--json")
+            res = helpers.remove("xtensor", "-n", TestRemove.env_name, "--json")
         else:
-            res = remove("xtensor", "--dry-run", "--json")
+            res = helpers.remove("xtensor", "--dry-run", "--json")

         keys = {"dry_run", "success", "prefix", "actions"}
         assert keys.issubset(set(res.keys()))
@@ -65,10 +66,10 @@ class TestRemove:
         assert res["actions"]["PREFIX"] == TestRemove.prefix

     def test_remove_orphaned(self, env_created):
-        env_pkgs = [p["name"] for p in umamba_list("-p", TestRemove.prefix, "--json")]
-        install("xframe", "-n", TestRemove.env_name, no_dry_run=True)
+        env_pkgs = [p["name"] for p in helpers.umamba_list("-p", TestRemove.prefix, "--json")]
+        helpers.install("xframe", "-n", TestRemove.env_name, no_dry_run=True)

-        res = remove("xframe", "-p", TestRemove.prefix, "--json")
+        res = helpers.remove("xframe", "-p", TestRemove.prefix, "--json")

         keys = {"dry_run", "success", "prefix", "actions"}
         assert keys.issubset(set(res.keys()))
@@ -77,13 +78,13 @@ class TestRemove:
         assert res["actions"]["UNLINK"][0]["name"] == "xframe"
         assert res["actions"]["PREFIX"] == TestRemove.prefix

-        res = remove("xtensor", "-p", TestRemove.prefix, "--json")
+        res = helpers.remove("xtensor", "-p", TestRemove.prefix, "--json")

         keys = {"dry_run", "success", "prefix", "actions"}
         assert keys.issubset(set(res.keys()))
         assert res["success"]
         assert len(res["actions"]["UNLINK"]) == len(env_pkgs) + (
-            1 if dry_run_tests == DryRun.DRY else 0
+            1 if helpers.dry_run_tests == helpers.DryRun.DRY else 0
         )
         for p in res["actions"]["UNLINK"]:
             assert p["name"] in env_pkgs
@@ -92,10 +93,9 @@ class TestRemove:
     def test_remove_force(self, env_created):
         # check that we can remove a package without solving the environment (putting
         # it in a bad state, actually)
-        env_pkgs = [p["name"] for p in umamba_list("-p", TestRemove.prefix, "--json")]
-        install("xframe", "-n", TestRemove.env_name, no_dry_run=True)
+        helpers.install("xframe", "-n", TestRemove.env_name, no_dry_run=True)

-        res = remove("xtl", "-p", TestRemove.prefix, "--json", "--force")
+        res = helpers.remove("xtl", "-p", TestRemove.prefix, "--json", "--force")

         keys = {"dry_run", "success", "prefix", "actions"}
         assert keys.issubset(set(res.keys()))
@@ -105,10 +105,9 @@ class TestRemove:
         assert res["actions"]["PREFIX"] == TestRemove.prefix

     def test_remove_no_prune_deps(self, env_created):
-        env_pkgs = [p["name"] for p in umamba_list("-p", TestRemove.prefix, "--json")]
-        install("xframe", "-n", TestRemove.env_name, no_dry_run=True)
+        helpers.install("xframe", "-n", TestRemove.env_name, no_dry_run=True)

-        res = remove("xtensor", "-p", TestRemove.prefix, "--json", "--no-prune-deps")
+        res = helpers.remove("xtensor", "-p", TestRemove.prefix, "--json", "--no-prune-deps")

         keys = {"dry_run", "success", "prefix", "actions"}
         assert keys.issubset(set(res.keys()))
@@ -120,24 +119,24 @@ class TestRemove:
         assert res["actions"]["PREFIX"] == TestRemove.prefix

     def test_remove_in_use(self, env_created):
-        install("python=3.9", "-n", self.env_name, "--json", no_dry_run=True)
+        helpers.install("python=3.9", "-n", self.env_name, "--json", no_dry_run=True)
         if platform.system() == "Windows":
             pyexe = Path(self.prefix) / "python.exe"
         else:
             pyexe = Path(self.prefix) / "bin" / "python"

-        env = get_fake_activate(self.prefix)
+        env = helpers.get_fake_activate(self.prefix)

         pyproc = subprocess.Popen(
             pyexe, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env
         )
         time.sleep(1)

-        res = remove("python", "-v", "-p", self.prefix, no_dry_run=True)
+        helpers.remove("python", "-v", "-p", self.prefix, no_dry_run=True)

         if platform.system() == "Windows":
             pyexe_trash = Path(str(pyexe) + ".mamba_trash")
-            assert pyexe.exists() == False
+            assert pyexe.exists() is False
             pyexe_trash_exists = pyexe_trash.exists()
             trash_file = Path(self.prefix) / "conda-meta" / "mamba_trash.txt"

@@ -148,16 +147,16 @@ class TestRemove:

                with open(trash_file, "r") as fi:
                    lines = [x.strip() for x in fi.readlines()]
-                    assert all([l.endswith(".mamba_trash") for l in lines])
+                    assert all([line.endswith(".mamba_trash") for line in lines])
                    assert len(all_trash_files) == len(lines)
-                    linesp = [Path(self.prefix) / l for l in lines]
+                    linesp = [Path(self.prefix) / line for line in lines]
                    for atf in all_trash_files:
                        assert atf in linesp
            else:
-                assert trash_file.exists() == False
-                assert pyexe_trash.exists() == False
+                assert trash_file.exists() is False
+                assert pyexe_trash.exists() is False
            # No change if file still in use
-            install("cpp-filesystem", "-n", self.env_name, "--json", no_dry_run=True)
+            helpers.install("cpp-filesystem", "-n", self.env_name, "--json", no_dry_run=True)

            if pyexe_trash_exists:
                assert trash_file.exists()
@@ -165,24 +164,24 @@ class TestRemove:

                with open(trash_file, "r") as fi:
                    lines = [x.strip() for x in fi.readlines()]
-                    assert all([l.endswith(".mamba_trash") for l in lines])
+                    assert all([line.endswith(".mamba_trash") for line in lines])
                    assert len(all_trash_files) == len(lines)
-                    linesp = [Path(self.prefix) / l for l in lines]
+                    linesp = [Path(self.prefix) / line for line in lines]
                    for atf in all_trash_files:
                        assert atf in linesp
            else:
-                assert trash_file.exists() == False
-                assert pyexe_trash.exists() == False
+                assert trash_file.exists() is False
+                assert pyexe_trash.exists() is False

            subprocess.Popen("TASKKILL /F /PID {pid} /T".format(pid=pyproc.pid))
            # check that another env mod clears lingering trash files
            time.sleep(0.5)
-            install("xsimd", "-n", self.env_name, "--json", no_dry_run=True)
-            assert trash_file.exists() == False
-            assert pyexe_trash.exists() == False
+            helpers.install("xsimd", "-n", self.env_name, "--json", no_dry_run=True)
+            assert trash_file.exists() is False
+            assert pyexe_trash.exists() is False

         else:
-            assert pyexe.exists() == False
+            assert pyexe.exists() is False
             pyproc.kill()


@@ -190,8 +189,8 @@ class TestRemoveConfig:
     current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
     current_prefix = os.environ["CONDA_PREFIX"]

-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
+    env_name = helpers.random_string()
+    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
     prefix = os.path.join(root_prefix, "envs", env_name)

     @staticmethod
@@ -199,8 +198,8 @@ class TestRemoveConfig:
     def root(existing_cache):
         os.environ["MAMBA_ROOT_PREFIX"] = TestRemoveConfig.root_prefix
         os.environ["CONDA_PREFIX"] = TestRemoveConfig.prefix
-        create("-n", "base", no_dry_run=True)
-        create("-n", TestRemoveConfig.env_name, "--offline", no_dry_run=True)
+        helpers.create("-n", "base", no_dry_run=True)
+        helpers.create("-n", TestRemoveConfig.env_name, "--offline", no_dry_run=True)

         yield

@@ -226,10 +225,10 @@ class TestRemoveConfig:
         assert res["target_prefix"] == target_prefix
         assert res["use_target_prefix_fallback"]
         checks = (
-            MAMBA_ALLOW_EXISTING_PREFIX
-            | MAMBA_NOT_ALLOW_MISSING_PREFIX
-            | MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
-            | MAMBA_EXPECT_EXISTING_PREFIX
+            helpers.MAMBA_ALLOW_EXISTING_PREFIX
+            | helpers.MAMBA_NOT_ALLOW_MISSING_PREFIX
+            | helpers.MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
+            | helpers.MAMBA_EXPECT_EXISTING_PREFIX
         )
         assert res["target_prefix_checks"] == checks

@@ -237,7 +236,7 @@ class TestRemoveConfig:
         specs = ["xframe", "xtl"]
         cmd = list(specs)

-        res = remove(*cmd, "--print-config-only")
+        res = helpers.remove(*cmd, "--print-config-only")

         TestRemoveConfig.common_tests(res)
         assert res["env_name"] == ""
@@ -246,9 +245,9 @@ class TestRemoveConfig:
     def test_remove_then_clean(self, env_created):
         env_file = __this_dir__ / "env-requires-pip-install.yaml"
         env_name = "env_to_clean"
-        create("-n", env_name, "-f", env_file, no_dry_run=True)
-        remove("-n", env_name, "pip", no_dry_run=True)
-        clean("-ay", no_dry_run=True)
+        helpers.create("-n", env_name, "-f", env_file, no_dry_run=True)
+        helpers.remove("-n", env_name, "pip", no_dry_run=True)
+        helpers.clean("-ay", no_dry_run=True)

     @pytest.mark.parametrize("root_prefix", (None, "env_var", "cli"))
     @pytest.mark.parametrize("target_is_root", (False, True))
@@ -269,9 +268,7 @@ class TestRemoveConfig:
         cmd = []

         if root_prefix in (None, "cli"):
-            os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop(
-                "MAMBA_ROOT_PREFIX"
-            )
+            os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop("MAMBA_ROOT_PREFIX")

         if root_prefix == "cli":
             cmd += ["-r", TestRemoveConfig.root_prefix]
@@ -299,11 +296,9 @@ class TestRemoveConfig:
         else:
             os.environ["CONDA_PREFIX"] = p

-        if (cli_prefix and cli_env_name) or not (
-            cli_prefix or cli_env_name or env_var or fallback
-        ):
+        if (cli_prefix and cli_env_name) or not (cli_prefix or cli_env_name or env_var or fallback):
             with pytest.raises(subprocess.CalledProcessError):
-                remove(*cmd, "--print-config-only")
+                helpers.remove(*cmd, "--print-config-only")
         else:
-            res = remove(*cmd, "--print-config-only")
+            res = helpers.remove(*cmd, "--print-config-only")
             TestRemoveConfig.common_tests(res, root_prefix=r, target_prefix=p)
@@ -63,9 +63,7 @@ def test_depends_not_installed_with_channel(yaml_env: Path, with_platform):
         )
         assert res["result"]["pkgs"][0]["subdir"] == "win-64"
     else:
-        res = helpers.umamba_repoquery(
-            "depends", "-c", "conda-forge", "xtensor=0.24.5", "--json"
-        )
+        res = helpers.umamba_repoquery("depends", "-c", "conda-forge", "xtensor=0.24.5", "--json")

     assert res["query"]["query"] == "xtensor=0.24.5"
     assert res["query"]["type"] == "depends"
@@ -85,9 +83,7 @@ def test_depends_not_installed_with_channel(yaml_env: Path, with_platform):

 @pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
 def test_depends_recursive(yaml_env: Path):
-    res = helpers.umamba_repoquery(
-        "depends", "-c", "conda-forge", "xtensor=0.24.5", "--recursive"
-    )
+    res = helpers.umamba_repoquery("depends", "-c", "conda-forge", "xtensor=0.24.5", "--recursive")

     if platform.system() == "Linux":
         assert "libzlib" in res
@@ -99,9 +95,7 @@ def test_depends_recursive(yaml_env: Path):

 @pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
 def test_depends_tree(yaml_env: Path):
-    res = helpers.umamba_repoquery(
-        "depends", "-c", "conda-forge", "xtensor=0.24.5", "--tree"
-    )
+    res = helpers.umamba_repoquery("depends", "-c", "conda-forge", "xtensor=0.24.5", "--tree")

     if platform.system() == "Linux":
         assert "libzlib" in res
@@ -151,9 +145,7 @@ def test_whoneeds_not_installed_with_channel(yaml_env: Path, with_platform):
         )
         assert res["result"]["pkgs"][0]["subdir"] == "osx-64"
     else:
-        res = helpers.umamba_repoquery(
-            "whoneeds", "-c", "conda-forge", "xtensor=0.24.5", "--json"
-        )
+        res = helpers.umamba_repoquery("whoneeds", "-c", "conda-forge", "xtensor=0.24.5", "--json")

     assert res["query"]["query"] == "xtensor=0.24.5"
     assert res["query"]["type"] == "whoneeds"
@@ -167,9 +159,7 @@ def test_whoneeds_not_installed_with_channel(yaml_env: Path, with_platform):

 @pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
 def test_whoneeds_tree(yaml_env: Path):
-    res = helpers.umamba_repoquery(
-        "whoneeds", "-c", "conda-forge", "xtensor=0.24.5", "--tree"
-    )
+    res = helpers.umamba_repoquery("whoneeds", "-c", "conda-forge", "xtensor=0.24.5", "--tree")

     assert "cppcolormap" in res
     assert "pyxtensor" in res
@@ -191,9 +181,7 @@ def test_search(yaml_env: Path, with_platform):
         )
         assert res["result"]["pkgs"][0]["subdir"] == "linux-64"
     else:
-        res = helpers.umamba_repoquery(
-            "search", "-c", "conda-forge", "xtensor*", "--json"
-        )
+        res = helpers.umamba_repoquery("search", "-c", "conda-forge", "xtensor*", "--json")

     assert res["query"]["query"] == "xtensor*"
     assert res["query"]["type"] == "search"
@@ -229,9 +217,7 @@ def test_local_search_installed_pkg(yaml_env: Path):

 @pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True)
 def test_remote_search_not_installed_pkg(yaml_env: Path):
-    res = helpers.umamba_repoquery(
-        "search", "-c", "conda-forge", "xtensor=0.24.5", "--json"
-    )
+    res = helpers.umamba_repoquery("search", "-c", "conda-forge", "xtensor=0.24.5", "--json")

     assert res["query"]["query"] == "xtensor=0.24.5"
     assert res["query"]["type"] == "search"
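These repoquery tests all route through `helpers.umamba_repoquery`, which is assumed to shell out to the micromamba binary and decode JSON only when `--json` is among the arguments (the `--tree` and `--recursive` results above are asserted against as plain text). A minimal sketch of such a wrapper, under that assumption:

    import json
    import subprocess


    def umamba_repoquery(*args: str):
        # Hypothetical reimplementation: run the CLI, parse JSON on demand.
        out = subprocess.check_output(["micromamba", "repoquery", *args], text=True)
        return json.loads(out) if "--json" in args else out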
@@ -16,9 +16,7 @@ possible_characters_for_process_names = (


 def generate_label_flags():
-    random_string = "".join(
-        random.choice(possible_characters_for_process_names) for _ in range(16)
-    )
+    random_string = "".join(random.choice(possible_characters_for_process_names) for _ in range(16))
     return ["--label", random_string]


@@ -36,14 +34,8 @@ class TestRun:
     @pytest.mark.parametrize("option_flag", common_simple_flags)
     @pytest.mark.parametrize("make_label_flags", next_label_flags)
     def test_fail_without_command(self, option_flag, make_label_flags):
-        fails = True
-        try:
+        with pytest.raises(subprocess.CalledProcessError):
             umamba_run(option_flag, *make_label_flags())
-            fails = False
-        except:
-            fails = True
-
-        assert fails == True

     @pytest.mark.parametrize("option_flag", common_simple_flags)
     @pytest.mark.parametrize("make_label_flags", next_label_flags)
@@ -52,15 +44,14 @@ class TestRun:
         try:
             umamba_run(option_flag, *make_label_flags(), "exe-that-does-not-exists")
             fails = False
-        except:
+        except subprocess.CalledProcessError:
             fails = True

         # In detach mode we fork micromamba and don't have a way to know if the executable exists.
         if option_flag == "-d" or option_flag == "--detach":
-            assert fails == False
-            return
-
-        assert fails == True
+            assert fails is False
+        else:
+            assert fails is True

     @pytest.mark.parametrize("option_flag", common_simple_flags)
     # @pytest.mark.parametrize("label_flags", naming_flags()) # TODO: reactivate after fixing help flag not disactivating the run
@@ -86,26 +77,19 @@ class TestRun:
     @pytest.mark.skipif(platform == "win32", reason="requires bash to be available")
     def test_shell_io_routing(self):
         test_script_file_name = "test_run.sh"
-        test_script_path = os.path.join(
-            os.path.dirname(__file__), test_script_file_name
-        )
+        test_script_path = os.path.join(os.path.dirname(__file__), test_script_file_name)
         if not os.path.isfile(test_script_path):
             raise RuntimeError(
-                "missing test script '{}' at '{}".format(
-                    test_script_file_name, test_script_path
-                )
+                "missing test script '{}' at '{}".format(test_script_file_name, test_script_path)
             )
         subprocess_run(test_script_path, shell=True)

     def test_run_non_existing_env(self):
         env_name = random_string()
         try:
-            run_res = umamba_run("-n", env_name, "python")
+            umamba_run("-n", env_name, "python")
         except subprocess.CalledProcessError as e:
-            assert (
-                "critical libmamba The given prefix does not exist:"
-                in e.stderr.decode()
-            )
+            assert "critical libmamba The given prefix does not exist:" in e.stderr.decode()


 @pytest.fixture()
@@ -129,7 +113,5 @@ def temp_env_prefix():

 class TestRunVenv:
     def test_classic_specs(self, temp_env_prefix):
-        res = umamba_run(
-            "-p", temp_env_prefix, "python", "-c", "import sys; print(sys.prefix)"
-        )
+        res = umamba_run("-p", temp_env_prefix, "python", "-c", "import sys; print(sys.prefix)")
         assert res.strip() == temp_env_prefix
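The `test_fail_without_command` rewrite above replaces a manual try/except/flag dance with pytest's context-manager idiom, which both asserts that the error occurs and pins down exactly which statement is expected to raise. A standalone illustration, with a hypothetical helper standing in for the real CLI wrapper (the POSIX `false` binary is used here just to force a non-zero exit):

    import subprocess

    import pytest


    def run_cli(*args: str) -> str:
        # Stand-in for a helper that raises CalledProcessError on failure.
        return subprocess.check_output(["false", *args], text=True)


    def test_fails_cleanly():
        # Scopes the expected failure to exactly one call.
        with pytest.raises(subprocess.CalledProcessError):
            run_cli("--anything")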
@@ -16,10 +16,7 @@ def skip_if_shell_incompat(shell_type):
     if (
         (plat_system == "Linux" and shell_type not in ("bash", "posix", "dash"))
         or (plat_system == "Windows" and shell_type not in ("cmd.exe", "powershell"))
-        or (
-            plat_system == "Darwin"
-            and shell_type not in ("zsh", "bash", "posix", "dash")
-        )
+        or (plat_system == "Darwin" and shell_type not in ("zsh", "bash", "posix", "dash"))
     ):
         pytest.skip("Incompatible shell/OS")

@@ -103,15 +100,11 @@ def test_auto_detection(tmp_home, tmp_root_prefix):
             print(res.stderr)
         except Exception:
             pass
-        return decode_json_output(
-            subprocess.check_output(cmd, text=True, encoding="utf-8")
-        )
+        return decode_json_output(subprocess.check_output(cmd, text=True, encoding="utf-8"))

     if platform.system() == "Windows":
         if "MAMBA_TEST_SHELL_TYPE" not in os.environ:
-            pytest.skip(
-                "'MAMBA_TEST_SHELL_TYPE' env variable needs to be defined to run this test"
-            )
+            pytest.skip("'MAMBA_TEST_SHELL_TYPE' env variable needs to be defined to run this test")
         shell_type = os.environ["MAMBA_TEST_SHELL_TYPE"]
         if shell_type == "bash":
             pytest.skip(
@@ -201,12 +194,8 @@ def test_activate_target_prefix_checks(tmp_home, tmp_root_prefix):

 @pytest.mark.parametrize("shell_type", ["bash", "powershell", "cmd.exe"])
 @pytest.mark.parametrize("prefix_selector", [None, "prefix"])
-@pytest.mark.parametrize(
-    "multiple_time,same_prefix", ((False, None), (True, False), (True, True))
-)
-def test_init(
-    tmp_home, tmp_root_prefix, shell_type, prefix_selector, multiple_time, same_prefix
-):
+@pytest.mark.parametrize("multiple_time,same_prefix", ((False, None), (True, False), (True, True)))
+def test_init(tmp_home, tmp_root_prefix, shell_type, prefix_selector, multiple_time, same_prefix):
     skip_if_shell_incompat(shell_type)

     if prefix_selector is None:
@@ -223,9 +212,7 @@ def test_init(
             "Windows long-path support already enabled.",
         ]
     else:
-        assert helpers.shell(
-            "-y", "init", "-s", shell_type, "-r", tmp_root_prefix / "env"
-        )
+        assert helpers.shell("-y", "init", "-s", shell_type, "-r", tmp_root_prefix / "env")

     if shell_type == "bash":
         assert (tmp_root_prefix / "etc" / "profile.d").is_dir()
@@ -5,16 +5,20 @@ from pathlib import Path

 import pytest

-from .helpers import *
+# Need to import everything to get fixtures
+from .helpers import *  # noqa: F403
+from . import helpers


-@pytest.mark.skipif(dry_run_tests == DryRun.ULTRA_DRY, reason="Running ultra dry tests")
+@pytest.mark.skipif(
+    helpers.dry_run_tests == helpers.DryRun.ULTRA_DRY, reason="Running ultra dry tests"
+)
 class TestUpdate:
     current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
     current_prefix = os.environ["CONDA_PREFIX"]

-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
+    env_name = helpers.random_string()
+    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
     prefix = os.path.join(root_prefix, "envs", env_name)
     old_version = "0.21.10"
     medium_old_version = "0.22"
@@ -34,15 +38,15 @@ class TestUpdate:
     @staticmethod
     @pytest.fixture
     def env_created(root):
-        if dry_run_tests == DryRun.OFF:
-            create(
+        if helpers.dry_run_tests == helpers.DryRun.OFF:
+            helpers.create(
                 f"xtensor={TestUpdate.old_version}",
                 "-n",
                 TestUpdate.env_name,
                 "--json",
                 no_dry_run=True,
             )
-        res = umamba_list("xtensor", "-n", TestUpdate.env_name, "--json")
+        res = helpers.umamba_list("xtensor", "-n", TestUpdate.env_name, "--json")
         assert len(res) == 1
         assert res[0]["version"].startswith(TestUpdate.old_version)

@@ -51,19 +55,19 @@ class TestUpdate:
         shutil.rmtree(TestUpdate.prefix)

     def test_constrained_update(self, env_created):
-        update_res = update(
+        update_res = helpers.update(
             "xtensor<=" + self.medium_old_version, "-n", env_created, "--json"
         )
         xtensor_link = [
-            l for l in update_res["actions"]["LINK"] if l["name"] == "xtensor"
+            to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "xtensor"
         ][0]

         assert xtensor_link["version"].startswith(self.medium_old_version)

     # test that we relink noarch packages
     def test_update_python_noarch(self, root):
-        if dry_run_tests == DryRun.OFF:
-            res_create = create(
+        if helpers.dry_run_tests == helpers.DryRun.OFF:
+            helpers.create(
                 "python=3.9",
                 "six",
                 "requests",
@@ -75,12 +79,12 @@ class TestUpdate:
         else:
             return

-        res = umamba_list("python", "-n", TestUpdate.env_name, "--json")
+        res = helpers.umamba_list("python", "-n", TestUpdate.env_name, "--json")
         assert len(res) >= 1
         pyelem = [r for r in res if r["name"] == "python"][0]
         assert pyelem["version"].startswith("3.9")

-        res = umamba_list("requests", "-n", TestUpdate.env_name, "--json")
+        res = helpers.umamba_list("requests", "-n", TestUpdate.env_name, "--json")
         prev_requests = [r for r in res if r["name"] == "requests"][0]
         assert prev_requests["version"]

@@ -92,20 +96,22 @@ class TestUpdate:

         assert os.path.exists(site_packages_path("requests/__pycache__", "3.9"))

-        prev_six = umamba_list("six", "-n", TestUpdate.env_name, "--json")[0]
+        prev_six = helpers.umamba_list("six", "-n", TestUpdate.env_name, "--json")[0]

-        update_res = update("-n", TestUpdate.env_name, "python=3.10", "--json")
+        update_res = helpers.update("-n", TestUpdate.env_name, "python=3.10", "--json")

-        six_link = [l for l in update_res["actions"]["LINK"] if l["name"] == "six"][0]
+        six_link = [
+            to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "six"
+        ][0]

         assert six_link["version"] == prev_six["version"]
         assert six_link["build_string"] == prev_six["build_string"]

         requests_link = [
-            l for l in update_res["actions"]["LINK"] if l["name"] == "requests"
+            to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "requests"
         ][0]
         requests_unlink = [
-            l for l in update_res["actions"]["UNLINK"] if l["name"] == "requests"
+            to_link for to_link in update_res["actions"]["UNLINK"] if to_link["name"] == "requests"
         ][0]

         assert requests_link["version"] == requests_unlink["version"]
@@ -117,48 +123,48 @@ class TestUpdate:
         assert requests_link["build_string"] == prev_requests["build_string"]

     def test_further_constrained_update(self, env_created):
-        update_res = update("xtensor==0.21.1=*_0", "--json")
+        update_res = helpers.update("xtensor==0.21.1=*_0", "--json")
         xtensor_link = [
-            l for l in update_res["actions"]["LINK"] if l["name"] == "xtensor"
+            to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "xtensor"
         ][0]

         assert xtensor_link["version"] == "0.21.1"
         assert xtensor_link["build_number"] == 0

     def test_classic_spec(self, env_created):
-        update_res = update("xtensor", "--json", "-n", TestUpdate.env_name)
+        update_res = helpers.update("xtensor", "--json", "-n", TestUpdate.env_name)

         xtensor_link = [
-            l for l in update_res["actions"]["LINK"] if l["name"] == "xtensor"
+            to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "xtensor"
         ][0]
         assert TestUpdate.old_version != xtensor_link["version"]

-        if dry_run_tests == DryRun.OFF:
-            pkg = get_concrete_pkg(update_res, "xtensor")
-            pkg_info = get_concrete_pkg_info(get_env(TestUpdate.env_name), pkg)
+        if helpers.dry_run_tests == helpers.DryRun.OFF:
+            pkg = helpers.get_concrete_pkg(update_res, "xtensor")
+            pkg_info = helpers.get_concrete_pkg_info(helpers.get_env(TestUpdate.env_name), pkg)
             version = pkg_info["version"]

             assert TestUpdate.old_version != version

         # This should do nothing since python is not installed!
-        update_res = update("python", "-n", TestUpdate.env_name, "--json")
+        update_res = helpers.update("python", "-n", TestUpdate.env_name, "--json")

         # TODO fix this?!
         assert update_res["message"] == "All requested packages already installed"
-        assert update_res["success"] == True
+        assert update_res["success"] is True
         assert "action" not in update_res

     def test_update_all(self, env_created):
-        update_res = update("--all", "--json")
+        update_res = helpers.update("--all", "--json")

         xtensor_link = [
-            l for l in update_res["actions"]["LINK"] if l["name"] == "xtensor"
+            to_link for to_link in update_res["actions"]["LINK"] if to_link["name"] == "xtensor"
         ][0]
         assert TestUpdate.old_version != xtensor_link["version"]

-        if dry_run_tests == DryRun.OFF:
-            pkg = get_concrete_pkg(update_res, "xtensor")
-            pkg_info = get_concrete_pkg_info(get_env(TestUpdate.env_name), pkg)
+        if helpers.dry_run_tests == helpers.DryRun.OFF:
+            pkg = helpers.get_concrete_pkg(update_res, "xtensor")
+            pkg_info = helpers.get_concrete_pkg_info(helpers.get_env(TestUpdate.env_name), pkg)
             version = pkg_info["version"]

             assert TestUpdate.old_version != version
@@ -183,7 +189,7 @@ class TestUpdate:
     )
     def test_channel_alias(self, alias, env_created):
         if alias:
-            res = update(
+            res = helpers.update(
                 "-n",
                 TestUpdate.env_name,
                 "xtensor",
@@ -194,20 +200,20 @@ class TestUpdate:
             )
             ca = alias.rstrip("/")
         else:
-            res = update("-n", TestUpdate.env_name, "xtensor", "--json", "--dry-run")
+            res = helpers.update("-n", TestUpdate.env_name, "xtensor", "--json", "--dry-run")
             ca = "https://conda.anaconda.org"

-        for l in res["actions"]["LINK"]:
-            assert l["channel"].startswith(f"{ca}/conda-forge/")
-            assert l["url"].startswith(f"{ca}/conda-forge/")
+        for to_link in res["actions"]["LINK"]:
+            assert to_link["channel"].startswith(f"{ca}/conda-forge/")
+            assert to_link["url"].startswith(f"{ca}/conda-forge/")


 class TestUpdateConfig:
     current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
     current_prefix = os.environ["CONDA_PREFIX"]

-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
+    env_name = helpers.random_string()
+    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + helpers.random_string()))
     prefix = os.path.join(root_prefix, "envs", env_name)

     @staticmethod
@@ -215,8 +221,8 @@ class TestUpdateConfig:
     def root(existing_cache):
         os.environ["MAMBA_ROOT_PREFIX"] = TestUpdateConfig.root_prefix
         os.environ["CONDA_PREFIX"] = TestUpdateConfig.prefix
-        create("-n", "base", no_dry_run=True)
-        create("-n", TestUpdateConfig.env_name, "--offline", no_dry_run=True)
+        helpers.create("-n", "base", no_dry_run=True)
+        helpers.create("-n", TestUpdateConfig.env_name, "--offline", no_dry_run=True)

         yield

@@ -242,10 +248,10 @@ class TestUpdateConfig:
         assert res["target_prefix"] == target_prefix
         assert res["use_target_prefix_fallback"]
         checks = (
-            MAMBA_ALLOW_EXISTING_PREFIX
-            | MAMBA_NOT_ALLOW_MISSING_PREFIX
-            | MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
-            | MAMBA_EXPECT_EXISTING_PREFIX
+            helpers.MAMBA_ALLOW_EXISTING_PREFIX
+            | helpers.MAMBA_NOT_ALLOW_MISSING_PREFIX
+            | helpers.MAMBA_NOT_ALLOW_NOT_ENV_PREFIX
+            | helpers.MAMBA_EXPECT_EXISTING_PREFIX
         )
         assert res["target_prefix_checks"] == checks

@@ -270,7 +276,7 @@ class TestUpdateConfig:
         cmd = list(specs)

         if source in ("spec_file_only", "both"):
-            f_name = random_string()
+            f_name = helpers.random_string()
             spec_file = os.path.join(TestUpdateConfig.root_prefix, f_name)

             if file_type == "classic":
@@ -293,7 +299,7 @@ class TestUpdateConfig:

             cmd += ["-f", spec_file]

-        res = install(*cmd, "--print-config-only")
+        res = helpers.install(*cmd, "--print-config-only")

         TestUpdateConfig.config_tests(res)
         assert res["env_name"] == ""
@@ -320,9 +326,7 @@ class TestUpdateConfig:
         cmd = []

         if root_prefix in (None, "cli"):
-            os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop(
-                "MAMBA_ROOT_PREFIX"
-            )
+            os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop("MAMBA_ROOT_PREFIX")

         if root_prefix == "cli":
             cmd += ["-r", TestUpdateConfig.root_prefix]
@@ -345,7 +349,7 @@ class TestUpdateConfig:
             cmd += ["-n", n]

         if yaml_name:
-            f_name = random_string() + ".yaml"
+            f_name = helpers.random_string() + ".yaml"
             spec_file = os.path.join(TestUpdateConfig.prefix, f_name)

             if yaml_name == "prefix":
@@ -353,9 +357,7 @@ class TestUpdateConfig:
             else:
                 yaml_n = n
                 if not (cli_prefix or cli_env_name or target_is_root):
-                    expected_p = os.path.join(
-                        TestUpdateConfig.root_prefix, "envs", yaml_n
-                    )
+                    expected_p = os.path.join(TestUpdateConfig.root_prefix, "envs", yaml_n)

             file_content = [
                 f"name: {yaml_n}",
@@ -379,10 +381,10 @@ class TestUpdateConfig:
             or (yaml_name == "prefix")
             or not (cli_prefix or cli_env_name or yaml_name or env_var or fallback)
         ):
-            with pytest.raises(subprocess.CalledProcessError):
-                install(*cmd, "--print-config-only")
+            with pytest.raises(helpers.subprocess.CalledProcessError):
+                helpers.install(*cmd, "--print-config-only")
         else:
-            res = install(*cmd, "--print-config-only")
+            res = helpers.install(*cmd, "--print-config-only")
             TestUpdateConfig.config_tests(res, root_prefix=r, target_prefix=expected_p)

     @pytest.mark.parametrize("cli", (False, True))
@@ -398,7 +400,7 @@ class TestUpdateConfig:
             expected_channels += ["cli"]

         if yaml:
-            f_name = random_string() + ".yaml"
+            f_name = helpers.random_string() + ".yaml"
             spec_file = os.path.join(TestUpdateConfig.prefix, f_name)

             file_content = [
@@ -416,7 +418,7 @@ class TestUpdateConfig:
             expected_channels += ["env_var"]

         if rc_file:
-            f_name = random_string() + ".yaml"
+            f_name = helpers.random_string() + ".yaml"
             rc_file = os.path.join(TestUpdateConfig.prefix, f_name)

             file_content = ["channels: [rc]"]
@@ -426,9 +428,7 @@ class TestUpdateConfig:
             cmd += ["--rc-file", rc_file]
             expected_channels += ["rc"]

-        res = install(
-            *cmd, "--print-config-only", no_rc=not rc_file, default_channel=False
-        )
+        res = helpers.install(*cmd, "--print-config-only", no_rc=not rc_file, default_channel=False)
         TestUpdateConfig.config_tests(res)
         if expected_channels:
             assert res["channels"] == expected_channels
@@ -445,7 +445,7 @@ class TestUpdateConfig:
         ]

         for i in range(2):
-            f_name = random_string()
+            f_name = helpers.random_string()
             file = os.path.join(TestUpdateConfig.prefix, f_name)

             if type == "yaml":
@@ -453,7 +453,6 @@ class TestUpdateConfig:
                 file_content = [f"dependencies: [{specs[i]}]"]
             elif type == "classic":
                 file_content = [specs[i]]
-                expected_specs = specs
             else:  # explicit
                 file_content = ["@EXPLICIT", explicit_specs[i]]

@@ -463,16 +462,16 @@ class TestUpdateConfig:
             cmd += ["-f", file]

         if type == "yaml":
-            with pytest.raises(subprocess.CalledProcessError):
-                install(*cmd, "--print-config-only")
+            with pytest.raises(helpers.subprocess.CalledProcessError):
+                helpers.install(*cmd, "--print-config-only")
         else:
-            res = install(*cmd, "--print-config-only")
+            res = helpers.install(*cmd, "--print-config-only")
             if type == "classic":
                 assert res["specs"] == specs
             else:  # explicit
                 assert res["specs"] == [explicit_specs[0]]

     def test_channel_specific(self, env_created):
-        install("quantstack::sphinx", no_dry_run=True)
-        res = update("quantstack::sphinx", "-c", "conda-forge", "--json")
+        helpers.install("quantstack::sphinx", no_dry_run=True)
+        res = helpers.update("quantstack::sphinx", "-c", "conda-forge", "--json")
         assert "actions" not in res
@@ -1,8 +1,6 @@
 import os
 import platform

-import pytest
-
 from .helpers import info

@@ -2,3 +2,9 @@
 minversion = "6.0"
 tmp_path_retention_policy = "failed"
 addopts = "--color=yes"
+
+[tool.ruff]
+line-length = 100
+target-version = "py37"
+[tool.ruff.format]
+line-ending = "lf"
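Ruff reads this pyproject.toml table automatically when invoked from the project root, which is how the 100-column limit stays consistent between editors and the command line. Presumably the intended local invocations are along the lines of (commands from Ruff's standard CLI, not from this diff):

    ruff check --fix .    # lint, applying safe autofixes
    ruff format .         # rewrite files with the Ruff formatter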
@@ -161,9 +161,7 @@ def main():
         else:
             sections[-1].items.append(Item())
             sections[-1].items[-1].text = c[m.end() :].strip()
-            sections[-1].items[-1].applies_to = [
-                x.strip() for x in m.groups(1)[0].split(",")
-            ]
+            sections[-1].items[-1].applies_to = [x.strip() for x in m.groups(1)[0].split(",")]

     else:
         if c.startswith(" "):