ruff/crates/ruff_python_trivia_integration_tests/tests/block_comments.rs
Dhruv Manilawala 28cc71fb6b
Remove cyclic dev dependency with the parser crate (#11261)
## Summary

This PR removes the cyclic dev dependencies that some of the crates had on
the parser crate.

The cyclic dependencies, sketched below, are:
* `ruff_python_ast` has a **dev dependency** on `ruff_python_parser`, while
`ruff_python_parser` depends directly on `ruff_python_ast`
* `ruff_python_trivia` has a **dev dependency** on `ruff_python_parser`, while
`ruff_python_parser` depends indirectly on `ruff_python_trivia`
(`ruff_python_parser` -> `ruff_python_ast` -> `ruff_python_trivia`)
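
In `Cargo.toml` terms, the first cycle looks roughly like the excerpt below.
This is an illustrative sketch, not the actual manifests from the workspace:

```toml
# Illustrative excerpt only; the real manifests may differ.

# crates/ruff_python_parser/Cargo.toml
[dependencies]
ruff_python_ast = { workspace = true }

# crates/ruff_python_ast/Cargo.toml
[dev-dependencies]
# Only the tests need the parser, but tools such as rust-analyzer include
# dev dependencies in the crate graph, which closes the loop.
ruff_python_parser = { workspace = true }
```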

Specifically, this PR does the following:
* Introduce two new crates (see the manifest sketch after this list):
  * `ruff_python_ast_integration_tests`, which now hosts the tests from the
    `ruff_python_ast` crate that use the parser
  * `ruff_python_trivia_integration_tests`, which now hosts the tests from
    the `ruff_python_trivia` crate that use the parser
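
The sketch below shows roughly what the manifest of such an integration-test
crate could look like. It is an assumption for illustration, not the actual
`Cargo.toml` added in this PR. Because no other crate depends on the
`*_integration_tests` crates, their dev dependency on the parser no longer
forms a cycle:

```toml
# Illustrative sketch of crates/ruff_python_ast_integration_tests/Cargo.toml;
# the real manifest in the PR may differ.
[package]
name = "ruff_python_ast_integration_tests"
version = "0.0.0"
publish = false
edition = "2021"

[dev-dependencies]
# A leaf, test-only crate can depend on both sides of the former cycle,
# because nothing else depends on it.
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
```

`ruff_python_trivia_integration_tests` follows the same pattern.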

### Motivation

The main motivation for this PR is to improve the development experience.
Before this PR, `rust-analyzer` wouldn't provide any intellisense in the
`ruff_python_parser` crate for symbols from the `ruff_python_ast` crate, as
its logs show:

```
[ERROR][2024-05-03 13:47:06] .../vim/lsp/rpc.lua:770	"rpc"	"/Users/dhruv/.cargo/bin/rust-analyzer"	"stderr"	"[ERROR project_model::workspace] cyclic deps: ruff_python_parser(Idx::<CrateData>(50)) -> ruff_python_ast(Idx::<CrateData>(37)), alternative path: ruff_python_ast(Idx::<CrateData>(37)) -> ruff_python_parser(Idx::<CrateData>(50))\n"
```

## Test Plan

Check the `rust-analyzer` logs and confirm that the cyclic dependency error
no longer appears.
2024-05-07 09:24:57 +00:00

use ruff_python_index::Indexer;
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::{tokenize, Mode};
use ruff_source_file::Locator;
use ruff_text_size::TextSize;

#[test]
fn block_comments_two_line_block_at_start() {
    // arrange
    let source = "# line 1\n# line 2\n";
    let tokens = tokenize(source, Mode::Module);
    let locator = Locator::new(source);
    let indexer = Indexer::from_tokens(&tokens, &locator);

    // act
    let block_comments = indexer.comment_ranges().block_comments(&locator);

    // assert
    assert_eq!(block_comments, vec![TextSize::new(0), TextSize::new(9)]);
}

#[test]
fn block_comments_indented_block() {
    // arrange
    let source = "    # line 1\n    # line 2\n";
    let tokens = tokenize(source, Mode::Module);
    let locator = Locator::new(source);
    let indexer = Indexer::from_tokens(&tokens, &locator);

    // act
    let block_comments = indexer.comment_ranges().block_comments(&locator);

    // assert
    assert_eq!(block_comments, vec![TextSize::new(4), TextSize::new(17)]);
}

#[test]
fn block_comments_single_line_is_not_a_block() {
    // arrange
    let source = "# line 1\n";
    let tokens: Vec<LexResult> = tokenize(source, Mode::Module);
    let locator = Locator::new(source);
    let indexer = Indexer::from_tokens(&tokens, &locator);

    // act
    let block_comments = indexer.comment_ranges().block_comments(&locator);

    // assert
    assert_eq!(block_comments, Vec::<TextSize>::new());
}

#[test]
fn block_comments_lines_with_code_not_a_block() {
    // arrange
    let source = "x = 1 # line 1\ny = 2 # line 2\n";
    let tokens = tokenize(source, Mode::Module);
    let locator = Locator::new(source);
    let indexer = Indexer::from_tokens(&tokens, &locator);

    // act
    let block_comments = indexer.comment_ranges().block_comments(&locator);

    // assert
    assert_eq!(block_comments, Vec::<TextSize>::new());
}

#[test]
fn block_comments_sequential_lines_not_in_block() {
    // arrange
    // The two comments are on consecutive lines but start at different
    // columns, so they must not be reported as a block.
    let source = "  # line 1\n    # line 2\n";
    let tokens = tokenize(source, Mode::Module);
    let locator = Locator::new(source);
    let indexer = Indexer::from_tokens(&tokens, &locator);

    // act
    let block_comments = indexer.comment_ranges().block_comments(&locator);

    // assert
    assert_eq!(block_comments, Vec::<TextSize>::new());
}

#[test]
fn block_comments_lines_in_triple_quotes_not_a_block() {
    // arrange
    let source = r#"
"""
# line 1
# line 2
"""
"#;
    let tokens = tokenize(source, Mode::Module);
    let locator = Locator::new(source);
    let indexer = Indexer::from_tokens(&tokens, &locator);

    // act
    let block_comments = indexer.comment_ranges().block_comments(&locator);

    // assert
    assert_eq!(block_comments, Vec::<TextSize>::new());
}

#[test]
fn block_comments_stress_test() {
    // arrange
    let source = r#"
# block comment 1 line 1
# block comment 2 line 2

# these lines
  # do not form
    # a block comment
x = 1  # these lines also do not
y = 2  # do not form a block comment
# these lines do form a block comment
#

    #
    # and so do these
    #

"""
# these lines are in triple quotes and
# therefore do not form a block comment
"""
"#;
    let tokens = tokenize(source, Mode::Module);
    let locator = Locator::new(source);
    let indexer = Indexer::from_tokens(&tokens, &locator);

    // act
    let block_comments = indexer.comment_ranges().block_comments(&locator);

    // assert
    assert_eq!(
        block_comments,
        vec![
            // Block #1
            TextSize::new(1),
            TextSize::new(26),
            // Block #2
            TextSize::new(174),
            TextSize::new(212),
            // Block #3
            TextSize::new(219),
            TextSize::new(225),
            TextSize::new(247)
        ]
    );
}