mirror of https://github.com/mamba-org/mamba.git
maint: Add pyupgrade pre-commit hook (#3671)
This commit is contained in: parent 9d6281c5ed · commit f67914ef8a
@@ -22,8 +22,13 @@ repos:
       - id: rst-backticks
       - id: rst-directive-colons
       - id: rst-inline-touching-normal
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.19.0
+    hooks:
+      - id: pyupgrade
+        args: [--py39-plus]
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.7.3
+    rev: v0.8.2
     hooks:
       - id: ruff
         args: [ --fix ]
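Note: pyupgrade rewrites code to the idioms its target Python version allows; with --py39-plus that means f-strings instead of %-formatting and .format(), builtin generics instead of typing.Dict/typing.List, OSError instead of its legacy aliases, and redundant open()/subprocess arguments dropped. These are exactly the mechanical changes the rest of this commit shows. A minimal before/after sketch on a hypothetical module (not from this repo):

# before: pre-3.9 idioms that pyupgrade --py39-plus flags
from typing import Dict, List

def describe(counts: Dict[str, int]) -> List[str]:
    return ["%s=%d" % (k, v) for k, v in counts.items()]

# after: the rewrite pyupgrade produces
def describe(counts: dict[str, int]) -> list[str]:
    return [f"{k}={v}" for k, v in counts.items()]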
@@ -25,7 +25,6 @@ import sphinx
 from docutils import nodes
 from docutils.parsers.rst import Directive, directives
 from docutils.statemachine import ViewList
-from six import text_type
 from sphinx.application import Sphinx
 from sphinx.errors import SphinxError
 from sphinx.locale import _
@@ -96,7 +95,7 @@ class Mermaid(Directive):
         try:
             with codecs.open(filename, "r", "utf-8") as fp:
                 mmcode = fp.read()
-        except (IOError, OSError):  # noqa
+        except OSError:  # noqa
             return [
                 document.reporter.warning(
                     "External Mermaid file %r not found or reading " "it failed" % filename,
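Aside: since Python 3.3 (PEP 3151), IOError, EnvironmentError, WindowsError, and socket.error are all aliases of OSError, so the tuple above was redundant; the EnvironmentError handlers in the registry helpers further down are the same cleanup. A quick check:

# All the legacy environment-error names point at the same class today.
assert IOError is OSError
assert EnvironmentError is OSError
try:
    open("/no/such/file")
except OSError as e:  # also catches everything IOError used to cover
    print(e.errno)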
@@ -144,8 +143,8 @@ def render_mm(self, code, options, fmt, prefix="mermaid"):
         "utf-8"
     )

-    basename = "%s-%s" % (prefix, sha1(hashkey).hexdigest())
-    fname = "%s.%s" % (basename, fmt)
+    basename = f"{prefix}-{sha1(hashkey).hexdigest()}"
+    fname = f"{basename}.{fmt}"
     relfn = posixpath.join(self.builder.imgpath, fname)
     outdir = os.path.join(self.builder.outdir, self.builder.imagedir)
     outfn = os.path.join(outdir, fname)
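This is pyupgrade's f-string rewrite: %-format and .format() calls whose arguments are plain names, attributes, or simple calls become literal f-strings with identical output. A sketch with hypothetical values:

prefix, digest, fmt = "mermaid", "3f2a", "svg"
basename = "%s-%s" % (prefix, digest)          # old spelling
assert f"{prefix}-{digest}" == basename        # pyupgrade's rewrite
assert f"{basename}.{fmt}" == "%s.%s" % (basename, fmt)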
@@ -157,7 +156,7 @@ def render_mm(self, code, options, fmt, prefix="mermaid"):
     ensuredir(os.path.dirname(outfn))

     # mermaid expects UTF-8 by default
-    if isinstance(code, text_type):
+    if isinstance(code, str):
         code = code.encode("utf-8")

     with open(tmpfn, "wb") as t:
@@ -235,8 +234,8 @@ def render_mm_html(self, node, code, options, prefix="mermaid", imgcls=None, alt
     alt = node.get("alt", self.encode(code).strip())
     imgcss = imgcls and 'class="%s"' % imgcls or ""
     if fmt == "svg":
-        svgtag = """<object data="%s" type="image/svg+xml">
-            <p class="warning">%s</p></object>\n""" % (
+        svgtag = """<object data="{}" type="image/svg+xml">
+            <p class="warning">{}</p></object>\n""".format(
             fname,
             alt,
         )
@@ -244,10 +243,10 @@ def render_mm_html(self, node, code, options, prefix="mermaid", imgcls=None, alt
     else:
         if "align" in node:
             self.body.append(
-                '<div align="%s" class="align-%s">' % (node["align"], node["align"])
+                '<div align="{}" class="align-{}">'.format(node["align"], node["align"])
             )

-        self.body.append('<img src="%s" alt="%s" %s/>\n' % (fname, alt, imgcss))
+        self.body.append(f'<img src="{fname}" alt="{alt}" {imgcss}/>\n')
         if "align" in node:
             self.body.append("</div>\n")

@@ -310,9 +309,7 @@ def render_mm_latex(self, node, code, options, prefix="mermaid"):
     elif node["align"] == "right":
         self.body.append("{\\hspace*{\\fill}")
         post = "}"
-    self.body.append(
-        "%s\\sphinxincludegraphics{%s}%s" % (para_separator, fname, para_separator)
-    )
+    self.body.append(f"{para_separator}\\sphinxincludegraphics{{{fname}}}{para_separator}")
     if post:
         self.body.append(post)

@@ -356,7 +353,7 @@ def man_visit_mermaid(self, node):

 def config_inited(app, config):
     version = config.mermaid_version
-    mermaid_js_url = "https://unpkg.com/mermaid@{}/dist/mermaid.min.js".format(version)
+    mermaid_js_url = f"https://unpkg.com/mermaid@{version}/dist/mermaid.min.js"
     app.add_js_file(mermaid_js_url)
     app.add_js_file(
         None,
@@ -29,7 +29,8 @@ r"""
     :license: BSD, see LICENSE for details.
 """

-from typing import Any, Dict, Iterable, List, cast
+from typing import Any, cast
+from collections.abc import Iterable

 import sphinx
 from docutils import nodes
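Per PEP 585 (Python 3.9) the builtin containers are subscriptable, and the abstract collection types live in collections.abc, so pyupgrade trims typing imports down to the names that still need that module (Any, cast, Optional, ...). A sketch of the pattern, using hypothetical names:

from collections.abc import Iterable
from typing import Any, cast

# dict/list subscript directly; Iterable comes from collections.abc
def index_chunks(chunks: Iterable[list[int]]) -> dict[int, Any]:
    return {i: cast(Any, chunk) for i, chunk in enumerate(chunks)}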
@@ -80,22 +81,22 @@ class MermaidGraph(InheritanceGraph):
     # 'style': '"setlinewidth(0.5)"',
     # }

-    def _format_node_attrs(self, attrs: Dict) -> str:
+    def _format_node_attrs(self, attrs: dict) -> str:
         # return ','.join(['%s=%s' % x for x in sorted(attrs.items())])
         return ""

-    def _format_graph_attrs(self, attrs: Dict) -> str:
+    def _format_graph_attrs(self, attrs: dict) -> str:
         # return ''.join(['%s=%s;\n' % x for x in sorted(attrs.items())])
         return ""

     def generate_dot(
         self,
         name: str,
-        urls: Dict = {},  # noqa
+        urls: dict = {},  # noqa
         env: BuildEnvironment = None,
-        graph_attrs: Dict = {},  # noqa
-        node_attrs: Dict = {},  # noqa
-        edge_attrs: Dict = {},  # noqa
+        graph_attrs: dict = {},  # noqa
+        node_attrs: dict = {},  # noqa
+        edge_attrs: dict = {},  # noqa
     ) -> str:
         """Generate a mermaid graph from the classes that were passed in
         to __init__.
@@ -120,17 +121,17 @@ class MermaidGraph(InheritanceGraph):
         res.append("classDiagram\n")
         for name, fullname, bases, tooltip in sorted(self.class_info):
             # Write the node
-            res.append(" class {!s}\n".format(name))
+            res.append(f" class {name!s}\n")
             if fullname in urls:
                 res.append(
                     ' link {!s} "./{!s}" {!s}\n'.format(
-                        name, urls[fullname], tooltip or '"{}"'.format(name)
+                        name, urls[fullname], tooltip or f'"{name}"'
                     )
                 )

             # Write the edges
             for base_name in bases:
-                res.append(" {!s} <|-- {!s}\n".format(base_name, name))
+                res.append(f" {base_name!s} <|-- {name!s}\n")

         return "".join(res)

@@ -159,7 +160,7 @@ class MermaidDiagram(InheritanceDiagram):
         "top-classes": directives.unchanged_required,
     }

-    def run(self) -> List[Node]:
+    def run(self) -> list[Node]:
         node = mermaid_inheritance()
         node.document = self.state.document
         class_names = self.arguments[0].split()
@@ -283,7 +284,7 @@ def texinfo_visit_mermaid_inheritance(self: TexinfoTranslator, node: inheritance
     raise nodes.SkipNode


-def setup(app: Sphinx) -> Dict[str, Any]:
+def setup(app: Sphinx) -> dict[str, Any]:
     app.setup_extension("mermaid")
     app.add_node(
         mermaid_inheritance,
@@ -2,4 +2,4 @@ version_info = ("2", "0", "5")
 version_prerelease = "dev0"
 __version__ = ".".join(map(str, version_info))
 if version_prerelease != "":
-    __version__ = "{}.{}".format(__version__, version_prerelease)
+    __version__ = f"{__version__}.{version_prerelease}"
@@ -7,7 +7,6 @@ import shutil
 import sys
 from http.server import HTTPServer, SimpleHTTPRequestHandler
 from pathlib import Path
-from typing import Dict, List

 try:
     import conda_content_trust.authentication as cct_authentication
@@ -34,7 +33,7 @@ def get_fingerprint(gpg_output: str) -> str:
     return fpline


-KeySet = Dict[str, List[Dict[str, str]]]
+KeySet = dict[str, list[dict[str, str]]]


 def normalize_keys(keys: KeySet) -> KeySet:
@@ -2,7 +2,8 @@ import copy
 import os
 import pathlib
 import platform
-from typing import Any, Generator, Mapping, Optional
+from typing import Any, Optional
+from collections.abc import Generator, Mapping

 import pytest

@@ -16,9 +16,7 @@ import yaml
 def subprocess_run(*args: str, **kwargs) -> str:
     """Execute a command in a subprocess while properly capturing stderr in exceptions."""
     try:
-        p = subprocess.run(
-            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True, **kwargs
-        )
+        p = subprocess.run(args, capture_output=True, check=True, **kwargs)
     except subprocess.CalledProcessError as e:
         print(f"Command {args} failed with stderr: {e.stderr.decode()}")
         print(f"Command {args} failed with stdout: {e.stdout.decode()}")
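subprocess.run() has accepted capture_output=True since Python 3.7 as shorthand for passing stdout=subprocess.PIPE and stderr=subprocess.PIPE together, so the two spellings are equivalent; the interpreter helpers and release script below get the same rewrite. A POSIX example:

import subprocess

a = subprocess.run(["echo", "hi"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
b = subprocess.run(["echo", "hi"], capture_output=True)
assert a.stdout == b.stdout  # both capture b"hi\n"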
@@ -382,7 +380,7 @@ def read_windows_registry(target_path):  # pragma: no cover

     try:
         key = winreg.OpenKey(main_key, subkey_str, 0, winreg.KEY_READ)
-    except EnvironmentError as e:
+    except OSError as e:
         if e.errno != errno.ENOENT:
             raise
         return None, None
@@ -410,7 +408,7 @@ def write_windows_registry(target_path, value_value, value_type):  # pragma: no
     main_key = getattr(winreg, main_key)
     try:
         key = winreg.OpenKey(main_key, subkey_str, 0, winreg.KEY_WRITE)
-    except EnvironmentError as e:
+    except OSError as e:
         if e.errno != errno.ENOENT:
             raise
         key = winreg.CreateKey(main_key, subkey_str)
@@ -46,7 +46,7 @@ class WindowsProfiles:
             "-Command",
             "$PROFILE.CurrentUserAllHosts",
         ]
-        res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
+        res = subprocess.run(args, capture_output=True, check=True)
         return res.stdout.decode("utf-8").strip()
     elif shell == "cmd.exe":
         return None
@@ -185,8 +185,7 @@ def call_interpreter(s, tmp_path, interpreter, interactive=False, env=None):
     try:
         res = subprocess.run(
             args,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
+            capture_output=True,
             check=True,
             env=env,
             encoding="utf-8",
@@ -273,7 +272,7 @@ def shvar(v, interpreter):

 def env_to_dict(out, interpreter="bash"):
     if interpreter == "cmd.exe":
-        with open(out, "r") as f:
+        with open(out) as f:
             out = f.read()

     if interpreter == "fish":
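open() defaults to mode "r" (text mode), so pyupgrade drops the explicit argument; keyword arguments such as encoding can still be passed on their own, as the test changes below show. A sketch with a hypothetical file name:

with open("data.txt") as f:                    # same as open("data.txt", "r")
    text = f.read()
with open("data.txt", encoding="utf-8") as f:  # mode still defaults to "r"
    text = f.read()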
@@ -159,7 +159,7 @@ def test_env_remove(tmp_home, tmp_root_prefix):
     env_json = helpers.run_env("list", "--json")
     assert str(env_fp) in env_json["envs"]
     assert env_fp.exists()
-    with open(conda_env_file, "r", encoding="utf-8") as f:
+    with open(conda_env_file, encoding="utf-8") as f:
         lines = [line.strip() for line in f]
     assert str(env_fp) in lines

@@ -168,7 +168,7 @@ def test_env_remove(tmp_home, tmp_root_prefix):
     env_json = helpers.run_env("list", "--json")
     assert str(env_fp) not in env_json["envs"]
     assert not env_fp.exists()
-    with open(conda_env_file, "r", encoding="utf-8") as f:
+    with open(conda_env_file, encoding="utf-8") as f:
         lines = [line.strip() for line in f]
     assert str(env_fp) not in lines

@@ -52,8 +52,8 @@ def test_extract(cph_test_file: Path, tmp_path: Path):
         dest_dir=str(tmp_path / "cph" / "cph_test_data-0.0.1-0"),
     )

-    conda = set((p.relative_to(tmp_path / "cph") for p in (tmp_path / "cph").rglob("**/*")))
-    mamba = set((p.relative_to(tmp_path / "mm") for p in (tmp_path / "mm").rglob("**/*")))
+    conda = {p.relative_to(tmp_path / "cph") for p in (tmp_path / "cph").rglob("**/*")}
+    mamba = {p.relative_to(tmp_path / "mm") for p in (tmp_path / "mm").rglob("**/*")}
     assert conda == mamba

     extracted = cph_test_file.name.removesuffix(".tar.bz2")
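Wrapping a generator expression in set() is equivalent to a set comprehension, and pyupgrade rewrites the former to the latter (mirroring the list(...) to [...] rewrite). A small illustration with made-up data:

paths = ["a/b", "a/c", "a/b"]
old = set(p.upper() for p in paths)   # generator passed to set()
new = {p.upper() for p in paths}      # comprehension form
assert old == new == {"A/B", "A/C"}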
@@ -75,10 +75,10 @@ def test_proxy_install(

     if auth is not None:
         proxy_options = ["--proxyauth", urllib.parse.unquote(auth)]
-        proxy_url = "http://{}@localhost:{}".format(auth, unused_tcp_port)
+        proxy_url = f"http://{auth}@localhost:{unused_tcp_port}"
     else:
         proxy_options = []
-        proxy_url = "http://localhost:{}".format(unused_tcp_port)
+        proxy_url = f"http://localhost:{unused_tcp_port}"

     proxy = MitmProxy(
         exe=mitmdump_exe,
@@ -92,9 +92,9 @@ def test_proxy_install(

     file_content = [
         "proxy_servers:",
-        " http: {}".format(proxy_url),
-        " https: {}".format(proxy_url),
-        "ssl_verify: {}".format(verify_string),
+        f" http: {proxy_url}",
+        f" https: {proxy_url}",
+        f"ssl_verify: {verify_string}",
     ]
     with open(rc_file, "w") as f:
         f.write("\n".join(file_content))
@@ -110,7 +110,7 @@ def test_proxy_install(

     proxy.stop_proxy()

-    with open(proxy.dump, "r") as f:
+    with open(proxy.dump) as f:
         proxied_requests = f.read().splitlines()

     for fetch in res["actions"]["FETCH"]:
@@ -127,7 +127,7 @@ def test_remove_in_use(tmp_home, tmp_root_prefix, tmp_xtensor_env, tmp_env_name)
     assert trash_file.exists()
     all_trash_files = list(Path(tmp_xtensor_env).rglob("*.mamba_trash"))

-    with open(trash_file, "r") as fi:
+    with open(trash_file) as fi:
         lines = [x.strip() for x in fi.readlines()]
     assert all([line.endswith(".mamba_trash") for line in lines])
     assert len(all_trash_files) == len(lines)
@@ -144,7 +144,7 @@ def test_remove_in_use(tmp_home, tmp_root_prefix, tmp_xtensor_env, tmp_env_name)
     assert trash_file.exists()
     assert pyexe_trash.exists()

-    with open(trash_file, "r") as fi:
+    with open(trash_file) as fi:
         lines = [x.strip() for x in fi.readlines()]
     assert all([line.endswith(".mamba_trash") for line in lines])
     assert len(all_trash_files) == len(lines)
@@ -155,7 +155,7 @@ def test_remove_in_use(tmp_home, tmp_root_prefix, tmp_xtensor_env, tmp_env_name)
     assert trash_file.exists() is False
     assert pyexe_trash.exists() is False

-    subprocess.Popen("TASKKILL /F /PID {pid} /T".format(pid=pyproc.pid))
+    subprocess.Popen(f"TASKKILL /F /PID {pyproc.pid} /T")
     # check that another env mod clears lingering trash files
     time.sleep(0.5)
     helpers.install("xsimd", "-n", tmp_env_name, "--json", no_dry_run=True)
@@ -80,7 +80,7 @@ class TestRun:
         test_script_path = os.path.join(os.path.dirname(__file__), test_script_file_name)
         if not os.path.isfile(test_script_path):
             raise RuntimeError(
-                "missing test script '{}' at '{}".format(test_script_file_name, test_script_path)
+                f"missing test script '{test_script_file_name}' at '{test_script_path}"
             )
         subprocess_run(test_script_path, shell=True)

@@ -29,7 +29,7 @@ def apply_changelog(name, version_name, changes):

     if name in templates:
         template = templates[name]
-        with open(template, "r") as fi:
+        with open(template) as fi:
             final = template_substitute(fi.read())
         with open(template[: -len(".tmpl")], "w") as fo:
             fo.write(final)
@@ -53,7 +53,7 @@ def apply_changelog(name, version_name, changes):
         res += "\n"

     cl_file = name + "/CHANGELOG.md"
-    with open(cl_file, "r") as fi:
+    with open(cl_file) as fi:
         prev_cl = fi.read()
     with open(cl_file, "w") as fo:
         fo.write(res + prev_cl)
@@ -123,7 +123,7 @@ def populate_changes(name, sections, changes):

 def main():
     changes = {}
-    with open("CHANGELOG.md", "r") as fi:
+    with open("CHANGELOG.md") as fi:
         contents = fi.readlines()

     for idx, line in enumerate(contents):
@@ -34,9 +34,7 @@ def validate_date(date_str):
 def subprocess_run(*args: str, **kwargs) -> str:
     """Execute a command in a subprocess while properly capturing stderr in exceptions."""
     try:
-        p = subprocess.run(
-            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True, **kwargs
-        )
+        p = subprocess.run(args, capture_output=True, check=True, **kwargs)
     except subprocess.CalledProcessError as e:
         print(f"Command {args} failed with stderr: {e.stderr.decode()}")
         print(f"Command {args} failed with stdout: {e.stdout.decode()}")
@@ -45,18 +43,18 @@ def subprocess_run(*args: str, **kwargs) -> str:


 def append_to_file(ctgr_name, prs, out_file):
-    out_file.write("\n{}:\n\n".format(ctgr_name))
+    out_file.write(f"\n{ctgr_name}:\n\n")
     for pr in prs:
         # Author
-        pr_author_cmd = "gh pr view {} --json author".format(pr)
+        pr_author_cmd = f"gh pr view {pr} --json author"
         author_login = dict(json.loads(subprocess_run(*pr_author_cmd.split()).decode("utf-8")))[
             "author"
         ]["login"]
         # Title
-        pr_title_cmd = "gh pr view {} --json title".format(pr)
+        pr_title_cmd = f"gh pr view {pr} --json title"
         title = dict(json.loads(subprocess_run(*pr_title_cmd.split()).decode("utf-8")))["title"]
         # URL
-        pr_url_cmd = "gh pr view {} --json url".format(pr)
+        pr_url_cmd = f"gh pr view {pr} --json url"
         url = dict(json.loads(subprocess_run(*pr_url_cmd.split()).decode("utf-8")))["url"]
         # Files
         # Use a different command with graphql allowing pagination
@@ -110,7 +108,7 @@ def main():

     for pr in prs_nbrs:
         # Get labels
-        pr_labels_cmd = "gh pr view {} --json labels".format(pr)
+        pr_labels_cmd = f"gh pr view {pr} --json labels"
         labels = dict(json.loads(subprocess_run(*pr_labels_cmd.split()).decode("utf-8")))["labels"]
         nb_rls_lbls_types = 0
         label = ""
@@ -121,7 +119,7 @@ def main():

         # Only one release label should be set
         if nb_rls_lbls_types == 0:
-            raise ValueError("No release label is set for PR #{}".format(pr))
+            raise ValueError(f"No release label is set for PR #{pr}")
         elif nb_rls_lbls_types > 1:
             raise ValueError(
                 "Only one release label should be set. PR #{} has {} labels.".format(
@@ -137,7 +135,7 @@ def main():
         elif label == "release::ci_docs":
             ci_docs_prs.append(pr)
         else:
-            raise ValueError("Unknown release label {} for PR #{}".format(label, pr))
+            raise ValueError(f"Unknown release label {label} for PR #{pr}")

     with open("CHANGELOG.md", "r+") as changelog_file:
         # Make sure we're appending at the beginning of the file
@@ -41,11 +41,11 @@ class version_info:
         version_errors = []

         if not self.major.isdigit():
-            version_errors.append("'{}' is not a valid major version number".format(self.major))
+            version_errors.append(f"'{self.major}' is not a valid major version number")
         if not self.minor.isdigit():
-            version_errors.append("'{}' is not a valid minor version number".format(self.minor))
+            version_errors.append(f"'{self.minor}' is not a valid minor version number")
         if not self.patch.isdigit():
-            version_errors.append("'{}' is not a valid patch version number".format(self.patch))
+            version_errors.append(f"'{self.patch}' is not a valid patch version number")

         if self.pre_release != "" and not self.pre_release.startswith(
             VALID_VERSION_PRERELEASE_TYPES
@@ -57,13 +57,13 @@ class version_info:
         )

         if len(version_errors) > 0:
-            error_message = "'{}' is not a valid version name:".format(version)
+            error_message = f"'{version}' is not a valid version name:"
             for error in version_errors:
-                error_message += "\n - {}".format(error)
+                error_message += f"\n - {error}"
             hint = (
                 "examples of valid versions: 1.2.3, 0.1.2, 1.2.3.alpha0, 1.2.3.beta1, 3.4.5.beta.2"
             )
-            error_message += "\n{}".format(hint)
+            error_message += f"\n{hint}"
             raise ValueError(error_message)

         self.name = version