mirror of
https://github.com/astral-sh/ruff.git
synced 2025-11-01 12:25:45 +00:00
Avoid omitting optional parentheses for argument-less parentheses (#6484)
## Summary
This PR fixes some misformattings around optional parentheses for
expressions.
I first noticed that we were misformatting this:
```python
return (
unicodedata.normalize("NFKC", s1).casefold()
== unicodedata.normalize("NFKC", s2).casefold()
)
```
The above is stable Black formatting, but we were doing:
```python
return unicodedata.normalize("NFKC", s1).casefold() == unicodedata.normalize(
"NFKC", s2
).casefold()
```
Above, the "last" expression is a function call, so our
`can_omit_optional_parentheses` was returning `true`...
However, it turns out that Black treats function calls differently
depending on whether or not they have arguments -- presumably because
they'll never split empty parentheses, and so the parentheses are
functionally useless. On further investigation, I believe this applies to all
parenthesized expressions. If Black can't split on the parentheses, it
doesn't leverage them when removing optional parentheses.
## Test Plan
Nice increase in similarity scores.
Before:
- `zulip`: 0.99702
- `django`: 0.99784
- `warehouse`: 0.99585
- `build`: 0.75623
- `transformers`: 0.99470
- `cpython`: 0.75989
- `typeshed`: 0.74853
After:
- `zulip`: 0.99705
- `django`: 0.99795
- `warehouse`: 0.99600
- `build`: 0.75623
- `transformers`: 0.99471
- `cpython`: 0.75989
- `typeshed`: 0.74853
This commit is contained in:
parent
7c4aa3948b
commit
d616c9b870
7 changed files with 428 additions and 36 deletions
90
crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/split_empty_brackets.py
vendored
Normal file
90
crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/split_empty_brackets.py
vendored
Normal file
|
|
@@ -0,0 +1,90 @@
|
|||
# Expressions with empty parentheses.
|
||||
ct_match = (
|
||||
unicodedata.normalize("NFKC", s1).casefold()
|
||||
== unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
unicodedata.normalize("NFKC", s1).casefold(1)
|
||||
== unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
unicodedata.normalize("NFKC", s1).casefold(0)
|
||||
== unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold(1)
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
unicodedata.normalize("NFKC", s1).casefold(1)
|
||||
== unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold(1)
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
unicodedata.normalize("NFKC", s1).casefold(
|
||||
# foo
|
||||
)
|
||||
== unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold(
|
||||
# foo
|
||||
)
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
[].unicodedata.normalize("NFKC", s1).casefold()
|
||||
== [].unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
[].unicodedata.normalize("NFKC", s1).casefold()
|
||||
== [1].unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
[1].unicodedata.normalize("NFKC", s1).casefold()
|
||||
== [].unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
[1].unicodedata.normalize("NFKC", s1).casefold()
|
||||
== [1].unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
{}.unicodedata.normalize("NFKC", s1).casefold()
|
||||
== {}.unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
{}.unicodedata.normalize("NFKC", s1).casefold()
|
||||
== {1}.unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
{1}.unicodedata.normalize("NFKC", s1).casefold()
|
||||
== {}.unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
{1}.unicodedata.normalize("NFKC", s1).casefold()
|
||||
== {1}.unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
ct_match = (
|
||||
([]).unicodedata.normalize("NFKC", s1).casefold()
|
||||
== ([]).unicodedata.normalize("NFKCNFKCNFKCNFKCNFKC", s2).casefold()
|
||||
)
|
||||
|
||||
return await self.http_client.fetch(
|
||||
f"http://127.0.0.1:{self.port}{path}", method=method, **kwargs,
|
||||
)
|
||||
|
||||
return await self.http_client().fetch(
|
||||
f"http://127.0.0.1:{self.port}{path}", method=method, **kwargs,
|
||||
)
|
||||
|
||||
return await self().http_client().fetch(
|
||||
f"http://127.0.0.1:{self.port}{path}", method=method, **kwargs,
|
||||
)
|
||||
|
||||
response = await sync_to_async(
|
||||
lambda: self.django_handler.get_response(request), thread_sensitive=True
|
||||
)()
|
||||
|
|
@@ -13,6 +13,17 @@ aa = [
|
|||
bakjdshflkjahdslkfjlasfdahjlfds
|
||||
] = dddd = ddd = fkjaödkjaföjfahlfdalfhaöfaöfhaöfha = g = [3]
|
||||
|
||||
aa = [
|
||||
|
||||
] = dddd = ddd = fkjaödkjaföjfahlfdalfhaöfaöfhaöfha = g = [3]
|
||||
|
||||
aa = [
|
||||
# foo
|
||||
] = dddd = ddd = fkjaödkjaföjfahlfdalfhaöfaöfhaöfha = g = [3]
|
||||
|
||||
aa = ([
|
||||
]) = dddd = ddd = fkjaödkjaföjfahlfdalfhaöfaöfhaöfha = g = [3]
|
||||
|
||||
aaaa = ( # trailing
|
||||
# comment
|
||||
bbbbb) = cccccccccccccccc = 3
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue