revert some now-unnecessary changes

operand_start is now used in only one place again, and the intermediate
variable in NeedsParentheses is no longer needed
Brent Westbrook 2025-11-17 12:02:41 -05:00
parent bd612c4d3a
commit 91cb799f0b
2 changed files with 23 additions and 36 deletions

File 1 of 2

@@ -13,7 +13,6 @@ use std::cmp::Ordering;
 use crate::comments::visitor::{CommentPlacement, DecoratedComment};
 use crate::expression::expr_slice::{ExprSliceCommentSection, assign_comment_in_slice};
-use crate::expression::expr_unary_op::operand_start;
 use crate::expression::parentheses::is_expression_parenthesized;
 use crate::other::parameters::{
     assign_argument_separator_comment_placement, find_parameter_separators,
 };
@@ -1915,7 +1914,22 @@ fn handle_unary_op_comment<'a>(
     unary_op: &'a ast::ExprUnaryOp,
     source: &str,
 ) -> CommentPlacement<'a> {
-    let up_to = operand_start(unary_op, source);
+    let mut tokenizer = SimpleTokenizer::new(
+        source,
+        TextRange::new(unary_op.start(), unary_op.operand.start()),
+    )
+    .skip_trivia();
+    let op_token = tokenizer.next();
+    debug_assert!(op_token.is_some_and(|token| matches!(
+        token.kind,
+        SimpleTokenKind::Tilde
+            | SimpleTokenKind::Not
+            | SimpleTokenKind::Plus
+            | SimpleTokenKind::Minus
+    )));
+    let up_to = tokenizer
+        .find(|token| token.kind == SimpleTokenKind::LParen)
+        .map_or(unary_op.operand.start(), |lparen| lparen.start());
     if comment.end() < up_to && comment.line_position().is_end_of_line() {
         CommentPlacement::dangling(unary_op, comment)
     } else {
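
For orientation, the code inlined above scans the source text between the unary operator and its operand: the first non-trivia token is the operator itself, and the first opening parenthesis after it, if any, marks a parenthesized operand. The standalone sketch below illustrates that scan; it assumes the ruff_python_trivia and ruff_text_size crates are available as dependencies, and the helper name up_to plus the example snippet are made up for illustration and are not part of this change.

use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_text_size::{Ranged, TextRange, TextSize};

// Mirrors the inlined logic: report the offset up to which an end-of-line
// comment still belongs to the unary expression's "head", i.e. the operand's
// opening parenthesis if present, otherwise the operand start itself.
fn up_to(source: &str, op_start: TextSize, operand_start: TextSize) -> TextSize {
    let mut tokenizer =
        SimpleTokenizer::new(source, TextRange::new(op_start, operand_start)).skip_trivia();
    // The first non-trivia token is the operator (`~`, `not`, `+`, or `-`).
    let _op = tokenizer.next();
    tokenizer
        .find(|token| token.kind == SimpleTokenKind::LParen)
        .map_or(operand_start, |lparen| lparen.start())
}

fn main() {
    // In `not  # why` followed by `(x)`, the comment ends before the `(`,
    // so handle_unary_op_comment keeps it as a dangling comment on the unary op.
    let source = "not  # why\n(x)";
    let lparen = TextSize::try_from(source.find('(').unwrap()).unwrap();
    let operand_start = lparen + TextSize::from(1u32); // start of `x`
    assert_eq!(up_to(source, TextSize::from(0u32), operand_start), lparen);
}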

File 2 of 2

@@ -2,8 +2,7 @@ use ruff_python_ast::AnyNodeRef;
 use ruff_python_ast::ExprUnaryOp;
 use ruff_python_ast::UnaryOp;
 use ruff_python_ast::parenthesize::parenthesized_range;
-use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
-use ruff_text_size::{Ranged, TextRange, TextSize};
+use ruff_text_size::Ranged;
 
 use crate::comments::trailing_comments;
 use crate::expression::parentheses::{
@@ -103,45 +102,19 @@ impl NeedsParentheses for ExprUnaryOp {
         context: &PyFormatContext,
     ) -> OptionalParentheses {
         if parent.is_expr_await() {
-            return OptionalParentheses::Always;
-        }
-
-        if context.comments().has_dangling(self) {
-            return OptionalParentheses::Multiline;
-        }
-
-        if is_expression_parenthesized(
+            OptionalParentheses::Always
+        } else if context.comments().has_dangling(self) {
+            OptionalParentheses::Multiline
+        } else if is_expression_parenthesized(
             self.operand.as_ref().into(),
             context.comments().ranges(),
             context.source(),
         ) {
-            return OptionalParentheses::Never;
-        }
-
-        if context.comments().has(self.operand.as_ref()) {
+            OptionalParentheses::Never
+        } else if context.comments().has(self.operand.as_ref()) {
             OptionalParentheses::Always
         } else {
             self.operand.needs_parentheses(self.into(), context)
         }
     }
 }
-
-/// Returns the start of `unary_op`'s operand, or its leading parenthesis, if it has one.
-pub(crate) fn operand_start(unary_op: &ExprUnaryOp, source: &str) -> TextSize {
-    let mut tokenizer = SimpleTokenizer::new(
-        source,
-        TextRange::new(unary_op.start(), unary_op.operand.start()),
-    )
-    .skip_trivia();
-    let op_token = tokenizer.next();
-    debug_assert!(op_token.is_some_and(|token| matches!(
-        token.kind,
-        SimpleTokenKind::Tilde
-            | SimpleTokenKind::Not
-            | SimpleTokenKind::Plus
-            | SimpleTokenKind::Minus
-    )));
-    tokenizer
-        .find(|token| token.kind == SimpleTokenKind::LParen)
-        .map_or(unary_op.operand.start(), |lparen| lparen.start())
-}