Use ruff instead of black, flake8, autoflake, pyupgrade

Config adapted from pytest.
Ran Benita 2024-04-02 23:23:31 +03:00
parent c01de1c73e
commit 816c9dcda1
15 changed files with 109 additions and 73 deletions

View File

@@ -1,14 +1,10 @@
repos:
- repo: https://github.com/PyCQA/autoflake
rev: v2.3.1
hooks:
- id: autoflake
args: ["--in-place", "--remove-unused-variables", "--remove-all-unused-imports"]
- repo: https://github.com/psf/black
rev: 24.3.0
hooks:
- id: black
args: [--safe, --quiet, --target-version, py35]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.3.5"
hooks:
- id: ruff
args: ["--fix"]
- id: ruff-format
- repo: https://github.com/asottile/blacken-docs
rev: 1.16.0
hooks:
@@ -17,19 +13,7 @@ repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: debug-statements
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
hooks:
- id: flake8
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.1
hooks:
- id: pyupgrade
args: [--py3-plus]
- repo: local
hooks:
- id: rst

View File

@@ -67,10 +67,72 @@ include-package-data = false
[tool.setuptools_scm]
write_to = "src/xdist/_version.py"
[tool.flake8]
# Ignore any errors related to formatting, let black worry/fix them.
ignore = ["E501", "W503", "E203"]
max-line-length = 100
[tool.ruff]
src = ["src"]
[tool.ruff.format]
docstring-code-format = true
[tool.ruff.lint]
select = [
"B", # bugbear
"D", # pydocstyle
"E", # pycodestyle
"F", # pyflakes
"I", # isort
"PYI", # flake8-pyi
"UP", # pyupgrade
"RUF", # ruff
"W", # pycodestyle
"T10", # flake8-debugger
"PIE", # flake8-pie
"PGH", # pygrep-hooks
"PLE", # pylint error
"PLW", # pylint warning
"PLR1714", # Consider merging multiple comparisons
]
ignore = [
# bugbear ignore
"B011", # Do not `assert False` (`python -O` removes these calls)
"B028", # No explicit `stacklevel` keyword argument found
# pydocstyle ignore
"D100", # Missing docstring in public module
"D101", # Missing docstring in public class
"D102", # Missing docstring in public method
"D103", # Missing docstring in public function
"D104", # Missing docstring in public package
"D105", # Missing docstring in magic method
"D106", # Missing docstring in public nested class
"D107", # Missing docstring in `__init__`
"D209", # Multi-line docstring closing quotes should be on a separate line
"D205", # 1 blank line required between summary line and description
"D400", # First line should end with a period
"D401", # First line of docstring should be in imperative mood
# ruff ignore
"RUF012", # Mutable class attributes should be annotated with `typing.ClassVar`
# pylint ignore
"PLW0603", # Using the global statement
"PLW0120", # remove the else and dedent its contents
"PLW2901", # for loop variable overwritten by assignment target
"PLR5501", # Use `elif` instead of `else` then `if`
]
[tool.ruff.lint.pycodestyle]
# In order to be able to format for 88 char in ruff format
max-line-length = 120
[tool.ruff.lint.pydocstyle]
convention = "pep257"
[tool.ruff.lint.isort]
force-single-line = true
combine-as-imports = true
force-sort-within-sections = true
order-by-type = false
lines-after-imports = 2
[tool.ruff.lint.per-file-ignores]
"src/xdist/_version.py" = ["I001"]
[tool.mypy]
mypy_path = ["src"]
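
As a rough sketch of what the isort settings above enforce (module names here are illustrative, not taken from the codebase): imports are written one per line, plain `import` and `from ... import` statements are sorted together by module name, and two blank lines follow the import block.

# Illustrative sketch only, not part of the commit.
from __future__ import annotations

from collections.abc import Sequence
import os
from pathlib import Path
import sys


def to_strings(paths: Sequence[Path]) -> list[str]:
    # Every import above is used, so none would be reported as unused.
    print(sys.platform)
    return [os.fspath(p) for p in paths]
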

View File

@@ -230,9 +230,7 @@ class DSession:
)
if maximum_reached:
if self._max_worker_restart == 0:
msg = "worker {} crashed and worker restarting disabled".format(
node.gateway.id
)
msg = f"worker {node.gateway.id} crashed and worker restarting disabled"
else:
msg = "maximum crashed workers reached: %d" % self._max_worker_restart
self._summary_report = msg
@@ -463,7 +461,7 @@ class TerminalDistReporter:
rinfo = gateway._rinfo()
different_interpreter = rinfo.executable != sys.executable
if different_interpreter:
version = "%s.%s.%s" % rinfo.version_info[:3]
version = "{}.{}.{}".format(*rinfo.version_info[:3])
self.rewrite(
f"[{gateway.id}] {rinfo.platform} Python {version} cwd: {rinfo.cwd}",
newline=True,
@@ -504,7 +502,7 @@ def get_default_max_worker_restart(config):
def get_workers_status_line(
status_and_items: Sequence[tuple[WorkerStatus, int]]
status_and_items: Sequence[tuple[WorkerStatus, int]],
) -> str:
"""
Return the line to display during worker setup/collection based on the

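The `%`-to-`str.format` rewrite in the reporter hunk above is behavior-preserving, since `rinfo.version_info[:3]` is a 3-tuple; printf-style `%` formatting is simply what ruff's pyupgrade-derived rules (most likely UP031) report. A minimal sketch with made-up values:

# Illustrative sketch, not part of the commit: both forms build "3.12.1".
version_info = (3, 12, 1)
old = "%s.%s.%s" % version_info[:3]
new = "{}.{}.{}".format(*version_info[:3])
assert old == new == "3.12.1"
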
View File

@@ -254,7 +254,7 @@ class StatRecorder:
return
time.sleep(checkinterval)
def check(self, removepycfiles: bool = True) -> bool: # noqa, too complex
def check(self, removepycfiles: bool = True) -> bool:
changed = False
newstat: Dict[Path, os.stat_result] = {}
for rootdir in self.rootdirlist:

View File

@@ -296,7 +296,7 @@ def pytest_cmdline_main(config):
if not val("collectonly") and _is_distribution_mode(config) and usepdb:
raise pytest.UsageError(
"--pdb is incompatible with distributing tests; try using -n0 or -nauto."
) # noqa: E501
)
# -------------------------------------------------------------------------

View File

@@ -325,7 +325,7 @@ def setup_config(config, basetemp):
if __name__ == "__channelexec__":
channel = channel # type: ignore[name-defined] # noqa: F821
channel = channel # type: ignore[name-defined] # noqa: F821, PLW0127
workerinput, args, option_dict, change_sys_path = channel.receive() # type: ignore[name-defined]
if change_sys_path is None:

View File

@@ -243,7 +243,7 @@ class LoadScheduling:
return
# Collections are identical, create the index of pending items.
self.collection = list(self.node2collection.values())[0]
self.collection = next(iter(self.node2collection.values()))
self.pending[:] = range(len(self.collection))
if not self.collection:
return
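
The `next(iter(...))` change just above is equivalent to indexing the first element, but avoids building an intermediate list from the dict's values; that is the rewrite suggested by ruff's RUF015 check, enabled here via the "RUF" group. A minimal sketch with a made-up mapping:

# Illustrative sketch, not part of the commit: both lines pick the first value.
node2collection = {"gw0": ["test_a", "test_b"], "gw1": ["test_a", "test_b"]}
first_old = list(node2collection.values())[0]     # materializes a full list
first_new = next(iter(node2collection.values()))  # stops at the first value
assert first_old == first_new
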
@@ -260,7 +260,7 @@ class LoadScheduling:
# to each node - which is suboptimal when you have less than
# 2 * len(nodes) tests.
nodes = cycle(self.nodes)
for i in range(len(self.pending)):
for _ in range(len(self.pending)):
self._send_tests(next(nodes), 1)
else:
# Send batches of consecutive tests. By default, pytest sorts tests

View File

@@ -208,7 +208,6 @@ class LoadScopeScheduling:
- ``DSession.worker_collectionfinish``.
"""
# Check that add_node() was called on the node before
assert node in self.assigned_work
@@ -301,7 +300,6 @@ class LoadScopeScheduling:
If there are any globally pending work units left then this will check
if the given node should be given any more tests.
"""
# Do not add more work to a node shutting down
if node.shutting_down:
return

View File

@@ -1,4 +1,7 @@
from collections import namedtuple
from __future__ import annotations
from typing import Any
from typing import NamedTuple
from _pytest.runner import CollectReport
@@ -7,7 +10,10 @@ from xdist.report import report_collection_diff
from xdist.workermanage import parse_spec_config
NodePending = namedtuple("NodePending", ["node", "pending"])
class NodePending(NamedTuple):
node: Any
pending: list[int]
# Every worker needs at least 2 tests in queue - the current and the next one.
MIN_PENDING = 2
@@ -285,7 +291,7 @@ class WorkStealingScheduling:
return
# Collections are identical, create the index of pending items.
self.collection = list(self.node2collection.values())[0]
self.collection = next(iter(self.node2collection.values()))
self.pending[:] = range(len(self.collection))
if not self.collection:
return

View File

@@ -175,7 +175,7 @@ class HostRSync(execnet.RSync):
sourcedir: PathLike,
*,
ignores: Optional[Sequence[PathLike]] = None,
**kwargs: object
**kwargs: object,
) -> None:
if ignores is None:
ignores = []
@@ -322,8 +322,8 @@ class WorkerController:
self.log(f"queuing {eventname}(**{kwargs})")
self.putevent((eventname, kwargs))
def process_from_remote(self, eventcall): # noqa too complex
"""this gets called for each object we receive from
def process_from_remote(self, eventcall):
"""This gets called for each object we receive from
the other side and if the channel closes.
Note that channel callbacks run in the receiver
@@ -400,7 +400,7 @@ class WorkerController:
except KeyboardInterrupt:
# should not land in receiver-thread
raise
except: # noqa
except BaseException:
from _pytest._code import ExceptionInfo
excinfo = ExceptionInfo.from_current()

View File

@@ -266,8 +266,8 @@ class TestDistribution:
"-pfoobarplugin",
"--foobar=123",
"--dist=load",
"--rsyncdir=%(subdir)s" % locals(),
"--tx=popen//chdir=%(dest)s" % locals(),
f"--rsyncdir={subdir}",
f"--tx=popen//chdir={dest}",
p,
)
assert result.ret == 0
@@ -657,9 +657,7 @@ def test_crashing_item(pytester, when) -> None:
def test_ok():
pass
""".format(
**code
)
""".format(**code)
)
passes = 2 if when == "teardown" else 1
result = pytester.runpytest("-n2", p)
@@ -1513,9 +1511,7 @@ class TestLocking:
FILE_LOCK = filelock.FileLock("test.lock")
""" + (
(_test_content * 4) % ("A", "B", "C", "D")
)
""" + ((_test_content * 4) % ("A", "B", "C", "D"))
@pytest.mark.parametrize("scope", ["each", "load", "loadscope", "loadfile", "no"])
def test_single_file(self, pytester, scope) -> None:

View File

@@ -165,7 +165,7 @@ class TestLoadScheduling:
for i in range(7, 16):
sched.mark_test_complete(node1, i - 3)
assert node1.sent == [0, 1] + list(range(4, i))
assert node1.sent == [0, 1, *range(4, i)]
assert node2.sent == [2, 3]
assert sched.pending == list(range(i, 16))
@@ -187,7 +187,7 @@ class TestLoadScheduling:
for complete_index, first_pending in enumerate(range(5, 16)):
sched.mark_test_complete(node1, node1.sent[complete_index])
assert node1.sent == [0, 1] + list(range(4, first_pending))
assert node1.sent == [0, 1, *range(4, first_pending)]
assert node2.sent == [2, 3]
assert sched.pending == list(range(first_pending, 16))
@@ -251,9 +251,7 @@ class TestLoadScheduling:
"""
class CollectHook:
"""
Dummy hook that stores collection reports.
"""
"""Dummy hook that stores collection reports."""
def __init__(self):
self.reports = []
@@ -295,7 +293,7 @@ class TestWorkStealingScheduling:
sched.schedule()
assert not sched.pending
assert not sched.tests_finished
assert node1.sent == list(range(0, 8))
assert node1.sent == list(range(8))
assert node2.sent == list(range(8, 16))
for i in range(8):
sched.mark_test_complete(node1, node1.sent[i])
@@ -315,7 +313,7 @@ class TestWorkStealingScheduling:
sched.add_node_collection(node2, collection)
assert sched.collection_is_completed
sched.schedule()
assert node1.sent == list(range(0, 8))
assert node1.sent == list(range(8))
assert node2.sent == list(range(8, 16))
for i in range(8):
sched.mark_test_complete(node1, node1.sent[i])

View File

@@ -13,7 +13,7 @@ from xdist.workermanage import NodeManager
def monkeypatch_3_cpus(monkeypatch: pytest.MonkeyPatch):
"""Make pytest-xdist believe the system has 3 CPUs."""
# block import
monkeypatch.setitem(sys.modules, "psutil", None) # type: ignore
monkeypatch.setitem(sys.modules, "psutil", None)
monkeypatch.delattr(os, "sched_getaffinity", raising=False)
monkeypatch.setattr(os, "cpu_count", lambda: 3)

View File

@@ -16,9 +16,9 @@ WAIT_TIMEOUT = 10.0
def check_marshallable(d):
try:
marshal.dumps(d)
except ValueError:
except ValueError as e:
pprint.pprint(d)
raise ValueError("not marshallable")
raise ValueError("not marshallable") from e
class EventCall:
@@ -164,7 +164,7 @@ class TestWorkerInteractor:
worker.sendcommand("runtests_all")
worker.sendcommand("shutdown")
for func in "::test_func", "::test_func2":
for i in range(3): # setup/call/teardown
for _ in range(3): # setup/call/teardown
ev = worker.popevent("testreport")
assert ev.name == "testreport"
rep = unserialize_report(ev.kwargs["data"])
@@ -329,12 +329,10 @@ def test_remote_mainargv(pytester: pytest.Pytester) -> None:
outer_argv = sys.argv
pytester.makepyfile(
"""
f"""
def test_mainargv(request):
assert request.config.workerinput["mainargv"] == {!r}
""".format(
outer_argv
)
assert request.config.workerinput["mainargv"] == {outer_argv!r}
"""
)
result = pytester.runpytest("-n1")
assert result.ret == 0

View File

@@ -63,7 +63,3 @@ commands =
# it so they don't conflict with each other (#611).
addopts = -ra -p no:pytest-services
testpaths = testing
[flake8]
max-line-length = 120
ignore = E203,W503