Enforce ruff/flake8-comprehensions rules (C4) #4785

Draft
wants to merge 18 commits into base: main
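Note: ruff's flake8-comprehensions (C4) family flags dict()/set()/list() calls that can be written as literals or comprehensions. A minimal, self-contained sketch of the rewrite patterns this diff applies (variable names are illustrative only, not taken from the codebase):

pairs = [("a", 1), ("b", 2)]
names = ["foo", "", "bar"]

# dict() with keyword arguments becomes a dict literal
old_attrs = dict(name="foo", version="1.0")
new_attrs = {"name": "foo", "version": "1.0"}

# generators inside dict()/set() become comprehensions
old_map = dict((k, str(v)) for k, v in pairs)
new_map = {k: str(v) for k, v in pairs}
old_set = set(x for x in names if x)
new_set = {x for x in names if x}

# a tuple literal inside set() becomes a set literal
assert set(("foo", "bar")) == {"foo", "bar"}

assert old_attrs == new_attrs and old_map == new_map and old_set == new_set
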
158 changes: 79 additions & 79 deletions docs/conf.py
@@ -11,86 +11,86 @@
# Link dates and other references in the changelog
extensions += ['rst.linker']
link_files = {
'../NEWS.rst': dict(
using=dict(
BB='https://bitbucket.org',
GH='https://github.com',
),
replace=[
dict(
pattern=r'(Issue #|\B#)(?P<issue>\d+)',
url='{package_url}/issues/{issue}',
),
dict(
pattern=r'(?m:^((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n)',
with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
),
dict(
pattern=r'PEP[- ](?P<pep_number>\d+)',
url='https://peps.python.org/pep-{pep_number:0>4}/',
),
dict(
pattern=r'(?<!\w)PR #(?P<pull>\d+)',
url='{package_url}/pull/{pull}',
),
dict(
pattern=r'BB Pull Request ?#(?P<bb_pull_request>\d+)',
url='{BB}/pypa/setuptools/pull-request/{bb_pull_request}',
),
dict(
pattern=r'Distribute #(?P<distribute>\d+)',
url='{BB}/tarek/distribute/issue/{distribute}',
),
dict(
pattern=r'Buildout #(?P<buildout>\d+)',
url='{GH}/buildout/buildout/issues/{buildout}',
),
dict(
pattern=r'Old Setuptools #(?P<old_setuptools>\d+)',
url='https://bugs.python.org/setuptools/issue{old_setuptools}',
),
dict(
pattern=r'Jython #(?P<jython>\d+)',
url='https://bugs.jython.org/issue{jython}',
),
dict(
pattern=r'(Python #|bpo-)(?P<python>\d+)',
url='https://bugs.python.org/issue{python}',
),
dict(
pattern=r'\bpython/cpython#(?P<cpython>\d+)',
url='{GH}/python/cpython/issues/{cpython}',
),
dict(
pattern=r'Interop #(?P<interop>\d+)',
url='{GH}/pypa/interoperability-peps/issues/{interop}',
),
dict(
pattern=r'Pip #(?P<pip>\d+)',
url='{GH}/pypa/pip/issues/{pip}',
),
dict(
pattern=r'Packaging #(?P<packaging>\d+)',
url='{GH}/pypa/packaging/issues/{packaging}',
),
dict(
pattern=r'[Pp]ackaging (?P<packaging_ver>\d+(\.\d+)+)',
url='{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst',
),
dict(
pattern=r'setuptools_svn #(?P<setuptools_svn>\d+)',
url='{GH}/jaraco/setuptools_svn/issues/{setuptools_svn}',
),
dict(
pattern=r'pypa/(?P<issue_repo>[\-\.\w]+)#(?P<issue_number>\d+)',
url='{GH}/pypa/{issue_repo}/issues/{issue_number}',
),
dict(
pattern=r'pypa/(?P<commit_repo>[\-\.\w]+)@(?P<commit_number>[\da-f]+)',
url='{GH}/pypa/{commit_repo}/commit/{commit_number}',
),
'../NEWS.rst': {
'using': {
'BB': 'https://bitbucket.org',
'GH': 'https://github.com',
},
'replace': [
{
'pattern': r'(Issue #|\B#)(?P<issue>\d+)',
'url': '{package_url}/issues/{issue}',
},
{
'pattern': r'(?m:^((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n)',
'with_scm': '{text}\n{rev[timestamp]:%d %b %Y}\n',
},
{
'pattern': r'PEP[- ](?P<pep_number>\d+)',
'url': 'https://peps.python.org/pep-{pep_number:0>4}/',
},
{
'pattern': r'(?<!\w)PR #(?P<pull>\d+)',
'url': '{package_url}/pull/{pull}',
},
{
'pattern': r'BB Pull Request ?#(?P<bb_pull_request>\d+)',
'url': '{BB}/pypa/setuptools/pull-request/{bb_pull_request}',
},
{
'pattern': r'Distribute #(?P<distribute>\d+)',
'url': '{BB}/tarek/distribute/issue/{distribute}',
},
{
'pattern': r'Buildout #(?P<buildout>\d+)',
'url': '{GH}/buildout/buildout/issues/{buildout}',
},
{
'pattern': r'Old Setuptools #(?P<old_setuptools>\d+)',
'url': 'https://bugs.python.org/setuptools/issue{old_setuptools}',
},
{
'pattern': r'Jython #(?P<jython>\d+)',
'url': 'https://bugs.jython.org/issue{jython}',
},
{
'pattern': r'(Python #|bpo-)(?P<python>\d+)',
'url': 'https://bugs.python.org/issue{python}',
},
{
'pattern': r'\bpython/cpython#(?P<cpython>\d+)',
'url': '{GH}/python/cpython/issues/{cpython}',
},
{
'pattern': r'Interop #(?P<interop>\d+)',
'url': '{GH}/pypa/interoperability-peps/issues/{interop}',
},
{
'pattern': r'Pip #(?P<pip>\d+)',
'url': '{GH}/pypa/pip/issues/{pip}',
},
{
'pattern': r'Packaging #(?P<packaging>\d+)',
'url': '{GH}/pypa/packaging/issues/{packaging}',
},
{
'pattern': r'[Pp]ackaging (?P<packaging_ver>\d+(\.\d+)+)',
'url': '{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst',
},
{
'pattern': r'setuptools_svn #(?P<setuptools_svn>\d+)',
'url': '{GH}/jaraco/setuptools_svn/issues/{setuptools_svn}',
},
{
'pattern': r'pypa/(?P<issue_repo>[\-\.\w]+)#(?P<issue_number>\d+)',
'url': '{GH}/pypa/{issue_repo}/issues/{issue_number}',
},
{
'pattern': r'pypa/(?P<commit_repo>[\-\.\w]+)@(?P<commit_number>[\da-f]+)',
'url': '{GH}/pypa/{commit_repo}/commit/{commit_number}',
},
],
),
},
}

# Be strict about any broken references
6 changes: 4 additions & 2 deletions pkg_resources/__init__.py
@@ -128,7 +128,9 @@ class _ZipLoaderModule(Protocol):
__loader__: zipimport.zipimporter


_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
_PEP440_FALLBACK = re.compile(
r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.IGNORECASE
)


class PEP440Warning(RuntimeWarning):
@@ -3172,7 +3174,7 @@ def __getattr__(self, attr: str):
def __dir__(self):
return list(
set(super().__dir__())
| set(attr for attr in self._provider.__dir__() if not attr.startswith('_'))
| {attr for attr in self._provider.__dir__() if not attr.startswith('_')}
)

@classmethod
4 changes: 2 additions & 2 deletions pkg_resources/tests/test_pkg_resources.py
@@ -347,7 +347,7 @@ class Environment(str):
env = Environment(tmpdir)
tmpdir.chmod(stat.S_IRWXU)
subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
env.paths = dict((dirname, str(tmpdir / dirname)) for dirname in subs)
env.paths = {dirname: str(tmpdir / dirname) for dirname in subs}
list(map(os.mkdir, env.paths.values()))
return env

@@ -357,7 +357,7 @@ def create_foo_pkg(self, env, version):
as version.
"""
ld = "This package has unicode metadata! ❄"
attrs = dict(name='foo', version=version, long_description=ld)
attrs = {"name": 'foo', "version": version, "long_description": ld}
dist = distutils.dist.Distribution(attrs)
iei_cmd = distutils.command.install_egg_info.install_egg_info(dist)
iei_cmd.initialize_options()
16 changes: 8 additions & 8 deletions pkg_resources/tests/test_resources.py
@@ -405,7 +405,7 @@ def test_resolve_conflicts_with_prior(self):
ws.resolve(parse_requirements("Foo\nBar\n"))

msg = "Baz 1.0 is installed but Baz==2.0 is required by "
msg += repr(set(['Bar']))
msg += repr({'Bar'})
assert vc.value.report() == msg


@@ -481,13 +481,13 @@ def checkSubMap(self, m):
assert sorted(m.get(key).attrs) == sorted(ep.attrs)
assert sorted(m.get(key).extras) == sorted(ep.extras)

submap_expect = dict(
feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
feature2=EntryPoint(
submap_expect = {
"feature1": EntryPoint('feature1', 'somemodule', ['somefunction']),
"feature2": EntryPoint(
'feature2', 'another.module', ['SomeClass'], ['extra1', 'extra2']
),
feature3=EntryPoint('feature3', 'this.module', extras=['something']),
)
"feature3": EntryPoint('feature3', 'this.module', extras=['something']),
}
submap_str = """
# define features for blah blah
feature1 = somemodule:somefunction
@@ -561,8 +561,8 @@ def testOptionsAndHashing(self):
r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
assert r1 == r2
assert set(r1.extras) == set(("foo", "bar"))
assert set(r2.extras) == set(("foo", "bar"))
assert set(r1.extras) == {"foo", "bar"}
assert set(r2.extras) == {"foo", "bar"}
assert hash(r1) == hash(r2)
assert hash(r1) == hash((
"twisted",
1 change: 1 addition & 0 deletions ruff.toml
@@ -14,6 +14,7 @@ extend-select = [

# local
"ANN2", # missing-return-type-*
"C4", # flake8-comprehensions
"F", # Pyflakes
"F404", # late-future-import
"FA", # flake8-future-annotations
8 changes: 4 additions & 4 deletions setup.py
@@ -76,16 +76,16 @@
"""
Undo secondary effect of `extra_path` adding to `install_lib`
"""
suffix = os.path.relpath(self.install_lib, self.install_libbase)

[GitHub Actions annotation on line 79 in setup.py, pyright (3.9 and 3.13, ubuntu-latest): No overloads for "relpath" match the provided arguments (reportCallIssue); Argument of type "str | None" cannot be assigned to parameter "path" of type "StrPath" in function "relpath" (reportArgumentType).]

if suffix.strip() == self._pth_contents.strip():
self.install_lib = self.install_libbase


setup_params = dict(
cmdclass={'install': install_with_pth},
package_data=package_data,
)
setup_params = {
"cmdclass": {'install': install_with_pth},
"package_data": package_data,
}

if __name__ == '__main__':
# allow setup.py to run from another directory
10 changes: 6 additions & 4 deletions setuptools/_normalization.py
@@ -8,10 +8,12 @@
import packaging

# https://packaging.python.org/en/latest/specifications/core-metadata/#name
_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I)
_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9._-]+", re.I)
_NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.I)
_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE)
_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9._-]+", re.IGNORECASE)
_NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.IGNORECASE)
_PEP440_FALLBACK = re.compile(
r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.IGNORECASE
)


def safe_identifier(name: str) -> str:
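Note: the re.I to re.IGNORECASE changes in this file and in pkg_resources/__init__.py are spelling only (a readability choice rather than a C4 fix); re.I is an alias for the same flag. A quick check:

import re

assert re.I is re.IGNORECASE
assert re.compile(r"^v?\d+", re.I).flags == re.compile(r"^v?\d+", re.IGNORECASE).flags
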
6 changes: 3 additions & 3 deletions setuptools/command/_requirestxt.py
@@ -77,11 +77,11 @@ def _move_install_requirements_markers(
for r in complex_reqs:
extras_require[':' + str(r.marker)].setdefault(r)

expanded_extras = dict(
expanded_extras = {
# list(dict.fromkeys(...)) ensures a list of unique strings
(k, list(dict.fromkeys(str(r) for r in map(_clean_req, v))))
k: list(dict.fromkeys(str(r) for r in map(_clean_req, v)))
for k, v in extras_require.items()
)
}

return simple_install_requires, expanded_extras

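Note: the comprehension above keeps the list(dict.fromkeys(...)) idiom, which de-duplicates while preserving first-seen order (dicts preserve insertion order since Python 3.7). A standalone check with made-up requirement strings:

reqs = ["pytest", "coverage", "pytest", "tomli"]
assert list(dict.fromkeys(reqs)) == ["pytest", "coverage", "tomli"]
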
3 changes: 1 addition & 2 deletions setuptools/command/bdist_egg.py
@@ -35,8 +35,7 @@ def _get_purelib():
def strip_module(filename):
if '.' in filename:
filename = os.path.splitext(filename)[0]
if filename.endswith('module'):
filename = filename[:-6]
filename = filename.removesuffix('module')
return filename


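Note: the removesuffix() rewrite above is behavior-preserving; str.removesuffix (available since Python 3.9) strips the suffix only when it is present, matching the old endswith() guard. For example:

assert "foomodule".removesuffix("module") == "foo"
assert "foo".removesuffix("module") == "foo"  # unchanged when the suffix is absent
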
8 changes: 4 additions & 4 deletions setuptools/command/build_clib.py
@@ -33,14 +33,14 @@ def build_libraries(self, libraries) -> None:
"'sources' must be present and must be "
"a list of source filenames"
)
sources = sorted(list(sources))
sources = sorted(sources)

log.info("building '%s' library", lib_name)

# Make sure everything is the correct type.
# obj_deps should be a dictionary of keys as sources
# and a list/tuple of files that are its dependencies.
obj_deps = build_info.get('obj_deps', dict())
obj_deps = build_info.get('obj_deps', {})
if not isinstance(obj_deps, dict):
raise DistutilsSetupError(
f"in 'libraries' option (library '{lib_name}'), "
@@ -51,7 +51,7 @@ def build_libraries(self, libraries) -> None:

# Get the global dependencies that are specified by the '' key.
# These will go into every source's dependency list.
global_deps = obj_deps.get('', list())
global_deps = obj_deps.get('', [])
if not isinstance(global_deps, (list, tuple)):
raise DistutilsSetupError(
f"in 'libraries' option (library '{lib_name}'), "
@@ -64,7 +64,7 @@
for source in sources:
src_deps = [source]
src_deps.extend(global_deps)
extra_deps = obj_deps.get(source, list())
extra_deps = obj_deps.get(source, [])
if not isinstance(extra_deps, (list, tuple)):
raise DistutilsSetupError(
f"in 'libraries' option (library '{lib_name}'), "
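Note: swapping dict()/list() for {}/[] as .get() defaults is purely cosmetic; the literal in the call expression is evaluated on every call, so each missing lookup still gets a fresh empty container. A small illustration with a stand-in build_info dict:

build_info = {"sources": ["a.c"]}
first = build_info.get("obj_deps", {})
second = build_info.get("obj_deps", {})
first["a.c"] = ["a.h"]
assert second == {}  # a distinct empty dict was returned each time
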
5 changes: 3 additions & 2 deletions setuptools/command/build_ext.py
@@ -1,6 +1,7 @@
from __future__ import annotations

import itertools
import operator
import os
import sys
from collections.abc import Iterator
@@ -326,7 +327,7 @@ def get_outputs(self) -> list[str]:
def get_output_mapping(self) -> dict[str, str]:
"""See :class:`setuptools.commands.build.SubCommand`"""
mapping = self._get_output_mapping()
return dict(sorted(mapping, key=lambda x: x[0]))
return dict(sorted(mapping, key=operator.itemgetter(0)))

def __get_stubs_outputs(self):
# assemble the base name for each extension that needs a stub
@@ -337,7 +338,7 @@ def __get_stubs_outputs(self):
)
# pair each base with the extension
pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
return list(base + fnext for base, fnext in pairs)
return [base + fnext for base, fnext in pairs]

def __get_output_extensions(self):
yield '.py'
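Note: operator.itemgetter(0) is equivalent to lambda x: x[0] as a sort key for these (dest, src) pairs, so this change (and the matching one in build_py.py below) does not alter the output mapping. A tiny check with hypothetical paths:

import operator

mapping = [("b/out.py", "src/b.py"), ("a/out.py", "src/a.py")]
by_lambda = dict(sorted(mapping, key=lambda x: x[0]))
by_itemgetter = dict(sorted(mapping, key=operator.itemgetter(0)))
assert by_lambda == by_itemgetter == {"a/out.py": "src/a.py", "b/out.py": "src/b.py"}
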
3 changes: 2 additions & 1 deletion setuptools/command/build_py.py
@@ -2,6 +2,7 @@

import fnmatch
import itertools
import operator
import os
import stat
import textwrap
@@ -147,7 +148,7 @@ def get_output_mapping(self) -> dict[str, str]:
self._get_package_data_output_mapping(),
self._get_module_mapping(),
)
return dict(sorted(mapping, key=lambda x: x[0]))
return dict(sorted(mapping, key=operator.itemgetter(0)))

def _get_module_mapping(self) -> Iterator[tuple[str, str]]:
"""Iterate over all modules producing (dest, src) pairs."""