Skip to content

Commit be7a813

Browse files
committed
Remove cyclic dev dependency with the parser crate
1 parent 349a4cf commit be7a813

File tree

73 files changed

+677
-718
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

73 files changed

+677
-718
lines changed

Cargo.lock

Lines changed: 1 addition & 3 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

crates/ruff_python_ast/Cargo.toml

Lines changed: 0 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -27,7 +27,6 @@ serde = { workspace = true, optional = true }
2727

2828
[dev-dependencies]
2929
insta = { workspace = true }
30-
ruff_python_parser = { path = "../ruff_python_parser" }
3130

3231
[features]
3332
serde = ["dep:serde", "ruff_text_size/serde"]

crates/ruff_python_index/src/comment_ranges.rs

Lines changed: 156 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -42,3 +42,159 @@ pub fn tokens_and_ranges(
4242
let comment_ranges = comment_ranges.finish();
4343
Ok((tokens, comment_ranges))
4444
}
45+
46+
#[cfg(test)]
mod tests {
    use ruff_python_parser::lexer::LexResult;
    use ruff_python_parser::{tokenize, Mode};
    use ruff_source_file::Locator;
    use ruff_text_size::TextSize;

    use crate::Indexer;

    /// Two unindented comment lines starting at offset 0 form a block;
    /// `block_comments` reports the start offset of each comment in the block.
    #[test]
    fn block_comments_two_line_block_at_start() {
        // arrange
        let source = "# line 1\n# line 2\n";
        let tokens = tokenize(source, Mode::Module);
        let locator = Locator::new(source);
        let indexer = Indexer::from_tokens(&tokens, &locator);

        // act
        let block_comments = indexer.comment_ranges().block_comments(&locator);

        // assert
        // "# line 1" starts at 0; "# line 2" starts after "# line 1\n" (9 bytes).
        assert_eq!(block_comments, vec![TextSize::new(0), TextSize::new(9)]);
    }

    /// Comment lines sharing the same (non-zero) indentation still form a block.
    #[test]
    fn block_comments_indented_block() {
        // arrange
        // Both lines are indented by 4 columns, so the comments start at
        // offsets 4 and 17 (4 + len("    # line 1\n")).
        let source = "    # line 1\n    # line 2\n";
        let tokens = tokenize(source, Mode::Module);
        let locator = Locator::new(source);
        let indexer = Indexer::from_tokens(&tokens, &locator);

        // act
        let block_comments = indexer.comment_ranges().block_comments(&locator);

        // assert
        assert_eq!(block_comments, vec![TextSize::new(4), TextSize::new(17)]);
    }

    /// A lone comment line never counts as a block comment.
    #[test]
    fn block_comments_single_line_is_not_a_block() {
        // arrange
        let source = "# line 1\n";
        let tokens: Vec<LexResult> = tokenize(source, Mode::Module);
        let locator = Locator::new(source);
        let indexer = Indexer::from_tokens(&tokens, &locator);

        // act
        let block_comments = indexer.comment_ranges().block_comments(&locator);

        // assert
        assert_eq!(block_comments, Vec::<TextSize>::new());
    }

    /// Trailing comments on lines that also contain code are not own-line
    /// comments, so they can never form a block.
    #[test]
    fn block_comments_lines_with_code_not_a_block() {
        // arrange
        let source = "x = 1  # line 1\ny = 2  # line 2\n";
        let tokens = tokenize(source, Mode::Module);
        let locator = Locator::new(source);
        let indexer = Indexer::from_tokens(&tokens, &locator);

        // act
        let block_comments = indexer.comment_ranges().block_comments(&locator);

        // assert
        assert_eq!(block_comments, Vec::<TextSize>::new());
    }

    /// Consecutive comment lines with *different* indentation do not form
    /// a block.
    #[test]
    fn block_comments_sequential_lines_not_in_block() {
        // arrange
        // First line is indented by 4 columns, second by 8 — the mismatch
        // keeps them out of a shared block.
        let source = "    # line 1\n        # line 2\n";
        let tokens = tokenize(source, Mode::Module);
        let locator = Locator::new(source);
        let indexer = Indexer::from_tokens(&tokens, &locator);

        // act
        let block_comments = indexer.comment_ranges().block_comments(&locator);

        // assert
        assert_eq!(block_comments, Vec::<TextSize>::new());
    }

    /// Lines that merely *look* like comments inside a triple-quoted string
    /// are string content, not comment tokens, and produce no blocks.
    #[test]
    fn block_comments_lines_in_triple_quotes_not_a_block() {
        // arrange
        let source = r#"
"""
# line 1
# line 2
"""
"#;
        let tokens = tokenize(source, Mode::Module);
        let locator = Locator::new(source);
        let indexer = Indexer::from_tokens(&tokens, &locator);

        // act
        let block_comments = indexer.comment_ranges().block_comments(&locator);

        // assert
        assert_eq!(block_comments, Vec::<TextSize>::new());
    }

    /// Exercises every rule at once: a plain block, a group broken up by a
    /// mismatched indent, trailing comments, two more blocks (one indented),
    /// and comments hidden inside a triple-quoted string.
    #[test]
    fn block_comments_stress_test() {
        // arrange
        let source = r#"
# block comment 1 line 1
# block comment 2 line 2

# these lines
    # do not form
# a block comment

x = 1  # these lines also do not
y = 2  # do not form a block comment

# these lines do form a block comment
#

    #
    # and so do these
    #

"""
# these lines are in triple quotes and
# therefore do not form a block comment
"""
"#;
        let tokens = tokenize(source, Mode::Module);
        let locator = Locator::new(source);
        let indexer = Indexer::from_tokens(&tokens, &locator);

        // act
        let block_comments = indexer.comment_ranges().block_comments(&locator);

        // assert
        assert_eq!(
            block_comments,
            vec![
                // Block #1: the two unindented comments at the top.
                TextSize::new(1),
                TextSize::new(26),
                // Block #2: "# these lines do form a block comment" + "#".
                TextSize::new(174),
                TextSize::new(212),
                // Block #3: the three comments indented by 4 columns.
                TextSize::new(219),
                TextSize::new(225),
                TextSize::new(247)
            ]
        );
    }
}

crates/ruff_python_parser/Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -29,6 +29,7 @@ unicode_names2 = { workspace = true }
2929
unicode-normalization = { workspace = true }
3030

3131
[dev-dependencies]
32+
ruff_python_trivia = { path = "../ruff_python_trivia" }
3233
ruff_source_file = { path = "../ruff_source_file" }
3334

3435
annotate-snippets = { workspace = true }

0 commit comments

Comments (0)