mirror of https://github.com/mamba-org/mamba.git
maint: Add pyupgrade pre-commit hook (#3671)
parent 9d6281c5ed · commit f67914ef8a
@@ -22,8 +22,13 @@ repos:
       - id: rst-backticks
       - id: rst-directive-colons
       - id: rst-inline-touching-normal
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.19.0
+    hooks:
+      - id: pyupgrade
+        args: [--py39-plus]
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.7.3
+    rev: v0.8.2
     hooks:
       - id: ruff
         args: [ --fix ]

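The hook added above is what produced every remaining hunk in this commit: pyupgrade with --py39-plus mechanically rewrites idioms that predate Python 3.9. A minimal before/after sketch of the kind of rewrite it performs (hypothetical snippet, not taken from this repository):

    # Before: typing aliases and str.format, both flagged by pyupgrade
    from typing import Dict

    def greet(names: Dict[str, str]) -> str:
        return "hello {}".format(", ".join(names))

    # After `pyupgrade --py39-plus`: builtin generics (PEP 585) and f-strings
    def greet(names: dict[str, str]) -> str:
        return f"hello {', '.join(names)}"
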
@@ -25,7 +25,6 @@ import sphinx
 from docutils import nodes
 from docutils.parsers.rst import Directive, directives
 from docutils.statemachine import ViewList
-from six import text_type
 from sphinx.application import Sphinx
 from sphinx.errors import SphinxError
 from sphinx.locale import _

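Dropping six here is safe because the shim has a trivial meaning on Python 3; a one-line check (illustrative only):

    # On Python 3, six.text_type was defined as the builtin str,
    # so any remaining use can be replaced with str directly
    text_type = str
    assert isinstance("flowchart", text_type)
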
@@ -96,7 +95,7 @@ class Mermaid(Directive):
         try:
             with codecs.open(filename, "r", "utf-8") as fp:
                 mmcode = fp.read()
-        except (IOError, OSError):  # noqa
+        except OSError:  # noqa
             return [
                 document.reporter.warning(
                     "External Mermaid file %r not found or reading " "it failed" % filename,

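Collapsing the tuple is sound: since Python 3.3, IOError and EnvironmentError survive only as aliases of OSError, so catching OSError alone covers all three names. A quick demonstration:

    # The legacy exception names are the same class object
    assert IOError is OSError
    assert EnvironmentError is OSError
    try:
        open("/nonexistent/path")
    except OSError as exc:  # also catches what was historically IOError
        assert isinstance(exc, FileNotFoundError)  # an OSError subclass
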
@@ -144,8 +143,8 @@ def render_mm(self, code, options, fmt, prefix="mermaid"):
         "utf-8"
     )

-    basename = "%s-%s" % (prefix, sha1(hashkey).hexdigest())
-    fname = "%s.%s" % (basename, fmt)
+    basename = f"{prefix}-{sha1(hashkey).hexdigest()}"
+    fname = f"{basename}.{fmt}"
     relfn = posixpath.join(self.builder.imgpath, fname)
     outdir = os.path.join(self.builder.outdir, self.builder.imagedir)
     outfn = os.path.join(outdir, fname)

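The %-formatting and f-string spellings above produce byte-for-byte identical names; a small check mirroring the hunk (the hashkey value is a hypothetical stand-in):

    from hashlib import sha1

    prefix, fmt = "mermaid", "png"
    hashkey = b"diagram-source-and-options"  # stand-in for the real hash key
    basename = f"{prefix}-{sha1(hashkey).hexdigest()}"
    assert basename == "%s-%s" % (prefix, sha1(hashkey).hexdigest())
    assert f"{basename}.{fmt}" == "%s.%s" % (basename, fmt)
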
@@ -157,7 +156,7 @@ def render_mm(self, code, options, fmt, prefix="mermaid"):
     ensuredir(os.path.dirname(outfn))

     # mermaid expects UTF-8 by default
-    if isinstance(code, text_type):
+    if isinstance(code, str):
         code = code.encode("utf-8")

     with open(tmpfn, "wb") as t:

@@ -235,8 +234,8 @@ def render_mm_html(self, node, code, options, prefix="mermaid", imgcls=None, alt
     alt = node.get("alt", self.encode(code).strip())
     imgcss = imgcls and 'class="%s"' % imgcls or ""
     if fmt == "svg":
-        svgtag = """<object data="%s" type="image/svg+xml">
-            <p class="warning">%s</p></object>\n""" % (
+        svgtag = """<object data="{}" type="image/svg+xml">
+            <p class="warning">{}</p></object>\n""".format(
             fname,
             alt,
         )

@@ -244,10 +243,10 @@ def render_mm_html(self, node, code, options, prefix="mermaid", imgcls=None, alt
     else:
         if "align" in node:
             self.body.append(
-                '<div align="%s" class="align-%s">' % (node["align"], node["align"])
+                '<div align="{}" class="align-{}">'.format(node["align"], node["align"])
             )

-        self.body.append('<img src="%s" alt="%s" %s/>\n' % (fname, alt, imgcss))
+        self.body.append(f'<img src="{fname}" alt="{alt}" {imgcss}/>\n')
         if "align" in node:
             self.body.append("</div>\n")

@@ -310,9 +309,7 @@ def render_mm_latex(self, node, code, options, prefix="mermaid"):
     elif node["align"] == "right":
         self.body.append("{\\hspace*{\\fill}")
         post = "}"
-    self.body.append(
-        "%s\\sphinxincludegraphics{%s}%s" % (para_separator, fname, para_separator)
-    )
+    self.body.append(f"{para_separator}\\sphinxincludegraphics{{{fname}}}{para_separator}")
    if post:
        self.body.append(post)

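One subtlety in the f-string above: literal braces are escaped by doubling, so the triple brace in {{{fname}}} is a literal "{", the interpolated fname, then a literal "}". A small demonstration with hypothetical values:

    para_separator = "\n"
    fname = "inheritance-graph.pdf"  # hypothetical value
    line = f"{para_separator}\\sphinxincludegraphics{{{fname}}}{para_separator}"
    assert line == "\n\\sphinxincludegraphics{inheritance-graph.pdf}\n"
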
@@ -356,7 +353,7 @@ def man_visit_mermaid(self, node):

 def config_inited(app, config):
     version = config.mermaid_version
-    mermaid_js_url = "https://unpkg.com/mermaid@{}/dist/mermaid.min.js".format(version)
+    mermaid_js_url = f"https://unpkg.com/mermaid@{version}/dist/mermaid.min.js"
     app.add_js_file(mermaid_js_url)
     app.add_js_file(
         None,

@@ -29,7 +29,8 @@ r"""
     :license: BSD, see LICENSE for details.
 """

-from typing import Any, Dict, Iterable, List, cast
+from typing import Any, cast
+from collections.abc import Iterable

 import sphinx
 from docutils import nodes

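The import split reflects two deprecations: PEP 585 (Python 3.9) makes the builtin containers subscriptable, and abstract classes such as Iterable are meant to come from collections.abc rather than typing. A minimal sketch:

    from collections.abc import Iterable

    # list[int] needs no typing.List import on Python >= 3.9
    def flatten(rows: Iterable[list[int]]) -> list[int]:
        return [x for row in rows for x in row]

    assert flatten([[1, 2], [3]]) == [1, 2, 3]
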
@@ -80,22 +81,22 @@ class MermaidGraph(InheritanceGraph):
     #     'style': '"setlinewidth(0.5)"',
     # }

-    def _format_node_attrs(self, attrs: Dict) -> str:
+    def _format_node_attrs(self, attrs: dict) -> str:
         # return ','.join(['%s=%s' % x for x in sorted(attrs.items())])
         return ""

-    def _format_graph_attrs(self, attrs: Dict) -> str:
+    def _format_graph_attrs(self, attrs: dict) -> str:
         # return ''.join(['%s=%s;\n' % x for x in sorted(attrs.items())])
         return ""

     def generate_dot(
         self,
         name: str,
-        urls: Dict = {},  # noqa
+        urls: dict = {},  # noqa
         env: BuildEnvironment = None,
-        graph_attrs: Dict = {},  # noqa
-        node_attrs: Dict = {},  # noqa
-        edge_attrs: Dict = {},  # noqa
+        graph_attrs: dict = {},  # noqa
+        node_attrs: dict = {},  # noqa
+        edge_attrs: dict = {},  # noqa
     ) -> str:
         """Generate a mermaid graph from the classes that were passed in
         to __init__.

@@ -120,17 +121,17 @@ class MermaidGraph(InheritanceGraph):
         res.append("classDiagram\n")
         for name, fullname, bases, tooltip in sorted(self.class_info):
             # Write the node
-            res.append(" class {!s}\n".format(name))
+            res.append(f" class {name!s}\n")
             if fullname in urls:
                 res.append(
                     ' link {!s} "./{!s}" {!s}\n'.format(
-                        name, urls[fullname], tooltip or '"{}"'.format(name)
+                        name, urls[fullname], tooltip or f'"{name}"'
                     )
                 )

             # Write the edges
             for base_name in bases:
-                res.append(" {!s} <|-- {!s}\n".format(base_name, name))
+                res.append(f" {base_name!s} <|-- {name!s}\n")

         return "".join(res)

@@ -159,7 +160,7 @@ class MermaidDiagram(InheritanceDiagram):
         "top-classes": directives.unchanged_required,
     }

-    def run(self) -> List[Node]:
+    def run(self) -> list[Node]:
         node = mermaid_inheritance()
         node.document = self.state.document
         class_names = self.arguments[0].split()

@@ -283,7 +284,7 @@ def texinfo_visit_mermaid_inheritance(self: TexinfoTranslator, node: inheritance
     raise nodes.SkipNode


-def setup(app: Sphinx) -> Dict[str, Any]:
+def setup(app: Sphinx) -> dict[str, Any]:
     app.setup_extension("mermaid")
     app.add_node(
         mermaid_inheritance,

@@ -2,4 +2,4 @@ version_info = ("2", "0", "5")
 version_prerelease = "dev0"
 __version__ = ".".join(map(str, version_info))
 if version_prerelease != "":
-    __version__ = "{}.{}".format(__version__, version_prerelease)
+    __version__ = f"{__version__}.{version_prerelease}"

@@ -7,7 +7,6 @@ import shutil
 import sys
 from http.server import HTTPServer, SimpleHTTPRequestHandler
 from pathlib import Path
-from typing import Dict, List

 try:
     import conda_content_trust.authentication as cct_authentication

@@ -34,7 +33,7 @@ def get_fingerprint(gpg_output: str) -> str:
     return fpline


-KeySet = Dict[str, List[Dict[str, str]]]
+KeySet = dict[str, list[dict[str, str]]]


 def normalize_keys(keys: KeySet) -> KeySet:

@@ -2,7 +2,8 @@ import copy
 import os
 import pathlib
 import platform
-from typing import Any, Generator, Mapping, Optional
+from typing import Any, Optional
+from collections.abc import Generator, Mapping

 import pytest

@@ -16,9 +16,7 @@ import yaml
 def subprocess_run(*args: str, **kwargs) -> str:
     """Execute a command in a subprocess while properly capturing stderr in exceptions."""
     try:
-        p = subprocess.run(
-            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True, **kwargs
-        )
+        p = subprocess.run(args, capture_output=True, check=True, **kwargs)
     except subprocess.CalledProcessError as e:
         print(f"Command {args} failed with stderr: {e.stderr.decode()}")
         print(f"Command {args} failed with stdout: {e.stdout.decode()}")

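capture_output=True, available since Python 3.7, is exact shorthand for passing subprocess.PIPE for both streams, which is why pyupgrade can fold the three-line call into one. A sketch of the equivalence (assumes a POSIX system where echo exists):

    import subprocess

    a = subprocess.run(["echo", "hi"], capture_output=True, check=True)
    b = subprocess.run(
        ["echo", "hi"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True
    )
    assert a.stdout == b.stdout == b"hi\n"
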
@@ -382,7 +380,7 @@ def read_windows_registry(target_path):  # pragma: no cover

     try:
         key = winreg.OpenKey(main_key, subkey_str, 0, winreg.KEY_READ)
-    except EnvironmentError as e:
+    except OSError as e:
         if e.errno != errno.ENOENT:
             raise
         return None, None

@@ -410,7 +408,7 @@ def write_windows_registry(target_path, value_value, value_type):  # pragma: no
     main_key = getattr(winreg, main_key)
     try:
         key = winreg.OpenKey(main_key, subkey_str, 0, winreg.KEY_WRITE)
-    except EnvironmentError as e:
+    except OSError as e:
         if e.errno != errno.ENOENT:
             raise
         key = winreg.CreateKey(main_key, subkey_str)

@@ -46,7 +46,7 @@ class WindowsProfiles:
                 "-Command",
                 "$PROFILE.CurrentUserAllHosts",
             ]
-            res = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
+            res = subprocess.run(args, capture_output=True, check=True)
             return res.stdout.decode("utf-8").strip()
         elif shell == "cmd.exe":
             return None

@@ -185,8 +185,7 @@ def call_interpreter(s, tmp_path, interpreter, interactive=False, env=None):
     try:
         res = subprocess.run(
             args,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
+            capture_output=True,
             check=True,
             env=env,
             encoding="utf-8",

@@ -273,7 +272,7 @@ def shvar(v, interpreter):

 def env_to_dict(out, interpreter="bash"):
     if interpreter == "cmd.exe":
-        with open(out, "r") as f:
+        with open(out) as f:
             out = f.read()

     if interpreter == "fish":

@@ -159,7 +159,7 @@ def test_env_remove(tmp_home, tmp_root_prefix):
     env_json = helpers.run_env("list", "--json")
     assert str(env_fp) in env_json["envs"]
     assert env_fp.exists()
-    with open(conda_env_file, "r", encoding="utf-8") as f:
+    with open(conda_env_file, encoding="utf-8") as f:
         lines = [line.strip() for line in f]
     assert str(env_fp) in lines

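The "r" argument is dropped here and in the next hunk because it is already the default mode for open(); the explicit encoding stays, since the default encoding is platform-dependent. An illustrative check:

    import tempfile

    with tempfile.NamedTemporaryFile("w", delete=False, encoding="utf-8") as f:
        f.write("name: base\n")  # hypothetical file contents
        path = f.name

    # open(path) is identical to open(path, "r")
    with open(path, encoding="utf-8") as fh:
        assert fh.read() == "name: base\n"
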
@@ -168,7 +168,7 @@ def test_env_remove(tmp_home, tmp_root_prefix):
     env_json = helpers.run_env("list", "--json")
     assert str(env_fp) not in env_json["envs"]
     assert not env_fp.exists()
-    with open(conda_env_file, "r", encoding="utf-8") as f:
+    with open(conda_env_file, encoding="utf-8") as f:
         lines = [line.strip() for line in f]
     assert str(env_fp) not in lines

@@ -52,8 +52,8 @@ def test_extract(cph_test_file: Path, tmp_path: Path):
         dest_dir=str(tmp_path / "cph" / "cph_test_data-0.0.1-0"),
     )

-    conda = set((p.relative_to(tmp_path / "cph") for p in (tmp_path / "cph").rglob("**/*")))
-    mamba = set((p.relative_to(tmp_path / "mm") for p in (tmp_path / "mm").rglob("**/*")))
+    conda = {p.relative_to(tmp_path / "cph") for p in (tmp_path / "cph").rglob("**/*")}
+    mamba = {p.relative_to(tmp_path / "mm") for p in (tmp_path / "mm").rglob("**/*")}
     assert conda == mamba

     extracted = cph_test_file.name.removesuffix(".tar.bz2")

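set((... for ...)) and a set comprehension build the same set; the comprehension simply drops the redundant generator wrapping. For example:

    paths = ["a/b", "a/c", "a/b"]  # hypothetical values
    assert set((p.upper() for p in paths)) == {p.upper() for p in paths} == {"A/B", "A/C"}
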
@@ -75,10 +75,10 @@ def test_proxy_install(

     if auth is not None:
         proxy_options = ["--proxyauth", urllib.parse.unquote(auth)]
-        proxy_url = "http://{}@localhost:{}".format(auth, unused_tcp_port)
+        proxy_url = f"http://{auth}@localhost:{unused_tcp_port}"
     else:
         proxy_options = []
-        proxy_url = "http://localhost:{}".format(unused_tcp_port)
+        proxy_url = f"http://localhost:{unused_tcp_port}"

     proxy = MitmProxy(
         exe=mitmdump_exe,

@@ -92,9 +92,9 @@ def test_proxy_install(

     file_content = [
         "proxy_servers:",
-        " http: {}".format(proxy_url),
-        " https: {}".format(proxy_url),
-        "ssl_verify: {}".format(verify_string),
+        f" http: {proxy_url}",
+        f" https: {proxy_url}",
+        f"ssl_verify: {verify_string}",
     ]
     with open(rc_file, "w") as f:
         f.write("\n".join(file_content))

@@ -110,7 +110,7 @@ def test_proxy_install(

     proxy.stop_proxy()

-    with open(proxy.dump, "r") as f:
+    with open(proxy.dump) as f:
         proxied_requests = f.read().splitlines()

     for fetch in res["actions"]["FETCH"]:

@@ -127,7 +127,7 @@ def test_remove_in_use(tmp_home, tmp_root_prefix, tmp_xtensor_env, tmp_env_name)
     assert trash_file.exists()
     all_trash_files = list(Path(tmp_xtensor_env).rglob("*.mamba_trash"))

-    with open(trash_file, "r") as fi:
+    with open(trash_file) as fi:
         lines = [x.strip() for x in fi.readlines()]
     assert all([line.endswith(".mamba_trash") for line in lines])
     assert len(all_trash_files) == len(lines)

@@ -144,7 +144,7 @@ def test_remove_in_use(tmp_home, tmp_root_prefix, tmp_xtensor_env, tmp_env_name)
     assert trash_file.exists()
     assert pyexe_trash.exists()

-    with open(trash_file, "r") as fi:
+    with open(trash_file) as fi:
         lines = [x.strip() for x in fi.readlines()]
     assert all([line.endswith(".mamba_trash") for line in lines])
     assert len(all_trash_files) == len(lines)

@@ -155,7 +155,7 @@ def test_remove_in_use(tmp_home, tmp_root_prefix, tmp_xtensor_env, tmp_env_name)
     assert trash_file.exists() is False
     assert pyexe_trash.exists() is False

-    subprocess.Popen("TASKKILL /F /PID {pid} /T".format(pid=pyproc.pid))
+    subprocess.Popen(f"TASKKILL /F /PID {pyproc.pid} /T")
     # check that another env mod clears lingering trash files
     time.sleep(0.5)
     helpers.install("xsimd", "-n", tmp_env_name, "--json", no_dry_run=True)

@@ -80,7 +80,7 @@ class TestRun:
         test_script_path = os.path.join(os.path.dirname(__file__), test_script_file_name)
         if not os.path.isfile(test_script_path):
             raise RuntimeError(
-                "missing test script '{}' at '{}".format(test_script_file_name, test_script_path)
+                f"missing test script '{test_script_file_name}' at '{test_script_path}"
             )
         subprocess_run(test_script_path, shell=True)

@@ -29,7 +29,7 @@ def apply_changelog(name, version_name, changes):

     if name in templates:
         template = templates[name]
-        with open(template, "r") as fi:
+        with open(template) as fi:
             final = template_substitute(fi.read())
         with open(template[: -len(".tmpl")], "w") as fo:
             fo.write(final)

@@ -53,7 +53,7 @@ def apply_changelog(name, version_name, changes):
         res += "\n"

     cl_file = name + "/CHANGELOG.md"
-    with open(cl_file, "r") as fi:
+    with open(cl_file) as fi:
         prev_cl = fi.read()
     with open(cl_file, "w") as fo:
         fo.write(res + prev_cl)

@@ -123,7 +123,7 @@ def populate_changes(name, sections, changes):

 def main():
     changes = {}
-    with open("CHANGELOG.md", "r") as fi:
+    with open("CHANGELOG.md") as fi:
         contents = fi.readlines()

     for idx, line in enumerate(contents):

@@ -34,9 +34,7 @@ def validate_date(date_str):
 def subprocess_run(*args: str, **kwargs) -> str:
     """Execute a command in a subprocess while properly capturing stderr in exceptions."""
     try:
-        p = subprocess.run(
-            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True, **kwargs
-        )
+        p = subprocess.run(args, capture_output=True, check=True, **kwargs)
     except subprocess.CalledProcessError as e:
         print(f"Command {args} failed with stderr: {e.stderr.decode()}")
         print(f"Command {args} failed with stdout: {e.stdout.decode()}")

@@ -45,18 +43,18 @@ def subprocess_run(*args: str, **kwargs) -> str:


 def append_to_file(ctgr_name, prs, out_file):
-    out_file.write("\n{}:\n\n".format(ctgr_name))
+    out_file.write(f"\n{ctgr_name}:\n\n")
     for pr in prs:
         # Author
-        pr_author_cmd = "gh pr view {} --json author".format(pr)
+        pr_author_cmd = f"gh pr view {pr} --json author"
         author_login = dict(json.loads(subprocess_run(*pr_author_cmd.split()).decode("utf-8")))[
             "author"
         ]["login"]
         # Title
-        pr_title_cmd = "gh pr view {} --json title".format(pr)
+        pr_title_cmd = f"gh pr view {pr} --json title"
         title = dict(json.loads(subprocess_run(*pr_title_cmd.split()).decode("utf-8")))["title"]
         # URL
-        pr_url_cmd = "gh pr view {} --json url".format(pr)
+        pr_url_cmd = f"gh pr view {pr} --json url"
         url = dict(json.loads(subprocess_run(*pr_url_cmd.split()).decode("utf-8")))["url"]
         # Files
         # Use a different command with graphql allowing pagination

@@ -110,7 +108,7 @@ def main():

     for pr in prs_nbrs:
         # Get labels
-        pr_labels_cmd = "gh pr view {} --json labels".format(pr)
+        pr_labels_cmd = f"gh pr view {pr} --json labels"
         labels = dict(json.loads(subprocess_run(*pr_labels_cmd.split()).decode("utf-8")))["labels"]
         nb_rls_lbls_types = 0
         label = ""

@@ -121,7 +119,7 @@ def main():

         # Only one release label should be set
         if nb_rls_lbls_types == 0:
-            raise ValueError("No release label is set for PR #{}".format(pr))
+            raise ValueError(f"No release label is set for PR #{pr}")
         elif nb_rls_lbls_types > 1:
             raise ValueError(
                 "Only one release label should be set. PR #{} has {} labels.".format(

@@ -137,7 +135,7 @@ def main():
         elif label == "release::ci_docs":
             ci_docs_prs.append(pr)
         else:
-            raise ValueError("Unknown release label {} for PR #{}".format(label, pr))
+            raise ValueError(f"Unknown release label {label} for PR #{pr}")

     with open("CHANGELOG.md", "r+") as changelog_file:
         # Make sure we're appending at the beginning of the file

@@ -41,11 +41,11 @@ class version_info:
         version_errors = []

         if not self.major.isdigit():
-            version_errors.append("'{}' is not a valid major version number".format(self.major))
+            version_errors.append(f"'{self.major}' is not a valid major version number")
         if not self.minor.isdigit():
-            version_errors.append("'{}' is not a valid minor version number".format(self.minor))
+            version_errors.append(f"'{self.minor}' is not a valid minor version number")
         if not self.patch.isdigit():
-            version_errors.append("'{}' is not a valid patch version number".format(self.patch))
+            version_errors.append(f"'{self.patch}' is not a valid patch version number")

         if self.pre_release != "" and not self.pre_release.startswith(
             VALID_VERSION_PRERELEASE_TYPES

@@ -57,13 +57,13 @@ class version_info:
         )

         if len(version_errors) > 0:
-            error_message = "'{}' is not a valid version name:".format(version)
+            error_message = f"'{version}' is not a valid version name:"
             for error in version_errors:
-                error_message += "\n - {}".format(error)
+                error_message += f"\n - {error}"
             hint = (
                 "examples of valid versions: 1.2.3, 0.1.2, 1.2.3.alpha0, 1.2.3.beta1, 3.4.5.beta.2"
             )
-            error_message += "\n{}".format(hint)
+            error_message += f"\n{hint}"
             raise ValueError(error_message)

         self.name = version