Use owned AST and tokens in bench (#11598)

Author: Micha Reiser, 2024-05-29 18:10:32 +02:00 (committed by GitHub)
Parent: e14096f0a8
Commit: 921bc15542
3 changed files with 39 additions and 63 deletions


@@ -60,7 +60,9 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
                 // Parse the source.
                 let ast = parse_program_tokens(tokens.clone(), case.code(), false).unwrap();
 
-                b.iter(|| {
+                b.iter_batched(
+                    || (ast.clone(), tokens.clone()),
+                    |(ast, tokens)| {
                         let path = case.path();
                         let result = lint_only(
                             &path,
@@ -69,15 +71,14 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
                             flags::Noqa::Enabled,
                             &SourceKind::Python(case.code().to_string()),
                             PySourceType::from(path.as_path()),
-                            ParseSource::Precomputed {
-                                tokens: &tokens,
-                                ast: &ast,
-                            },
+                            ParseSource::Precomputed { tokens, ast },
                         );
 
                         // Assert that file contains no parse errors
                         assert_eq!(result.error, None);
-                });
+                    },
+                    criterion::BatchSize::SmallInput,
+                );
             },
         );
     }
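The switch from `b.iter` to `b.iter_batched` moves the cloning of the precomputed AST and tokens into a setup closure that Criterion runs outside the measured region, so only the lint run that consumes the owned values is timed. A minimal, self-contained sketch of the same pattern (the benchmark name and the summing routine are illustrative, not taken from Ruff):

use criterion::{criterion_group, criterion_main, BatchSize, Criterion};

fn bench_consuming_routine(c: &mut Criterion) {
    // Precompute an input once; building it should stay out of the hot loop.
    let expensive_input: Vec<u64> = (0..10_000).collect();

    c.bench_function("consume_owned_input", |b| {
        b.iter_batched(
            // Setup: clone the precomputed value; this part is not timed.
            || expensive_input.clone(),
            // Routine: consume the owned clone; only this part is measured.
            |input| input.into_iter().sum::<u64>(),
            BatchSize::SmallInput,
        );
    });
}

criterion_group!(benches, bench_consuming_routine);
criterion_main!(benches);

`BatchSize::SmallInput` tells Criterion that the setup output is small enough that many inputs can be buffered per batch, which keeps the per-iteration overhead of the setup calls low.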


@@ -146,7 +146,7 @@ pub fn check_path(
         .any(|rule_code| rule_code.lint_source().is_imports());
     if use_ast || use_imports || use_doc_lines {
         // Parse, if the AST wasn't pre-provided provided.
-        match tokens.into_ast_source(source_kind, source_type) {
+        match tokens.into_ast(source_kind, source_type) {
             Ok(python_ast) => {
                 let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets);
                 let notebook_index = source_kind.as_ipy_notebook().map(Notebook::index);
@@ -684,23 +684,16 @@ This indicates a bug in Ruff. If you could open an issue at:
 }
 
 #[derive(Debug, Clone)]
-pub enum ParseSource<'a> {
+pub enum ParseSource {
     /// Extract the tokens and AST from the given source code.
     None,
     /// Use the precomputed tokens and AST.
-    Precomputed {
-        tokens: &'a [LexResult],
-        ast: &'a Suite,
-    },
+    Precomputed { tokens: Tokens, ast: Suite },
 }
 
-impl<'a> ParseSource<'a> {
+impl ParseSource {
     /// Convert to a [`TokenSource`], tokenizing if necessary.
-    fn into_token_source(
-        self,
-        source_kind: &SourceKind,
-        source_type: PySourceType,
-    ) -> TokenSource<'a> {
+    fn into_token_source(self, source_kind: &SourceKind, source_type: PySourceType) -> TokenSource {
         match self {
             Self::None => TokenSource::Tokens(ruff_python_parser::tokenize(
                 source_kind.source_code(),
@@ -712,17 +705,14 @@
 }
 
 #[derive(Debug, Clone)]
-pub enum TokenSource<'a> {
+pub enum TokenSource {
     /// Use the precomputed tokens to generate the AST.
     Tokens(Tokens),
     /// Use the precomputed tokens and AST.
-    Precomputed {
-        tokens: &'a [LexResult],
-        ast: &'a Suite,
-    },
+    Precomputed { tokens: Tokens, ast: Suite },
 }
 
-impl TokenSource<'_> {
+impl TokenSource {
     /// Returns an iterator over the [`TokenKind`] and the corresponding range.
     ///
     /// [`TokenKind`]: ruff_python_parser::TokenKind
@@ -734,7 +724,7 @@ impl TokenSource<'_> {
     }
 }
 
-impl Deref for TokenSource<'_> {
+impl Deref for TokenSource {
     type Target = [LexResult];
 
     fn deref(&self) -> &Self::Target {
@@ -745,39 +735,20 @@ impl Deref for TokenSource<'_> {
     }
 }
 
-impl<'a> TokenSource<'a> {
+impl TokenSource {
     /// Convert to an [`AstSource`], parsing if necessary.
-    fn into_ast_source(
+    fn into_ast(
         self,
         source_kind: &SourceKind,
         source_type: PySourceType,
-    ) -> Result<AstSource<'a>, ParseError> {
+    ) -> Result<Suite, ParseError> {
         match self {
-            Self::Tokens(tokens) => Ok(AstSource::Ast(ruff_python_parser::parse_program_tokens(
+            Self::Tokens(tokens) => Ok(ruff_python_parser::parse_program_tokens(
                 tokens,
                 source_kind.source_code(),
                 source_type.is_ipynb(),
-            )?)),
-            Self::Precomputed { ast, .. } => Ok(AstSource::Precomputed(ast)),
-        }
-    }
-}
-
-#[derive(Debug, Clone)]
-pub enum AstSource<'a> {
-    /// Extract the AST from the given source code.
-    Ast(Suite),
-    /// Use the precomputed AST.
-    Precomputed(&'a Suite),
-}
-
-impl Deref for AstSource<'_> {
-    type Target = Suite;
-
-    fn deref(&self) -> &Self::Target {
-        match self {
-            Self::Ast(ast) => ast,
-            Self::Precomputed(ast) => ast,
+            )?),
+            Self::Precomputed { ast, .. } => Ok(ast),
         }
     }
 }
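The rest of this file's change removes the `'a` lifetime from `ParseSource` and `TokenSource` by storing owned `Tokens` and `Suite` values, which also makes the separate `AstSource` wrapper unnecessary: `into_ast` can simply return the owned `Suite`. A small sketch of the same borrowed-to-owned refactor with stand-in types (`OwnedSource`, `into_items`, and the integer payload are illustrative, not Ruff's API):

// Before: a variant borrowing precomputed data forces a lifetime parameter
// onto the enum and onto everything that stores or returns it.
#[allow(dead_code)]
enum BorrowedSource<'a> {
    Fresh(String),
    Precomputed(&'a [i32]),
}

// After: owning the data drops the lifetime, and a consuming method can
// return the payload by value in both variants, so no extra wrapper enum
// distinguishing borrowed from owned results is needed.
enum OwnedSource {
    Fresh(String),
    Precomputed(Vec<i32>),
}

impl OwnedSource {
    fn into_items(self) -> Vec<i32> {
        match self {
            // "Parse" the fresh input into owned items.
            Self::Fresh(text) => text
                .split_whitespace()
                .filter_map(|word| word.parse().ok())
                .collect(),
            // The precomputed case just moves the owned data out.
            Self::Precomputed(items) => items,
        }
    }
}

fn main() {
    // Callers hand over ownership instead of threading a lifetime through.
    assert_eq!(OwnedSource::Precomputed(vec![1, 2, 3]).into_items(), vec![1, 2, 3]);
    assert_eq!(OwnedSource::Fresh("4 5 6".into()).into_items(), vec![4, 5, 6]);
}

The trade-off, visible in the benchmark change above, is that callers who want to keep the precomputed data around must clone it before handing it over.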


@@ -353,10 +353,12 @@ mod tests {
     use insta::assert_debug_snapshot;
     use serde::de::DeserializeOwned;
 
+    #[cfg(not(windows))]
     use ruff_linter::registry::Linter;
 
     use super::*;
 
+    #[cfg(not(windows))]
     const VS_CODE_INIT_OPTIONS_FIXTURE: &str =
         include_str!("../../resources/test/fixtures/settings/vs_code_initialization_options.json");
     const GLOBAL_ONLY_INIT_OPTIONS_FIXTURE: &str =
@@ -368,7 +370,8 @@
         serde_json::from_str(content).expect("test fixture JSON should deserialize")
     }
 
-    #[cfg_attr(not(windows), test)]
+    #[cfg(not(windows))]
+    #[test]
     fn test_vs_code_init_options_deserialize() {
         let options: InitializationOptions = deserialize_fixture(VS_CODE_INIT_OPTIONS_FIXTURE);
@@ -553,7 +556,8 @@
         "###);
     }
 
-    #[cfg_attr(not(windows), test)]
+    #[cfg(not(windows))]
+    #[test]
     fn test_vs_code_workspace_settings_resolve() {
         let options = deserialize_fixture(VS_CODE_INIT_OPTIONS_FIXTURE);
         let AllSettings {
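The test-attribute change in the hunks above swaps `#[cfg_attr(not(windows), test)]` for `#[cfg(not(windows))] #[test]`: with `cfg_attr` the function body is still compiled on Windows (it just isn't registered as a test there), whereas the `cfg` form removes the item from Windows builds entirely, which lets it reference the import and fixture constant that are now themselves gated on `not(windows)`. A short sketch contrasting the two forms (the function names are illustrative, not from the Ruff code):

// With `cfg_attr`, the body is compiled on every platform; on Windows it is
// merely not a test (and will typically trigger a dead-code warning there),
// so everything it references must also exist on Windows.
#[cfg_attr(not(windows), test)]
fn compiled_everywhere_tested_elsewhere() {
    assert_eq!(2 + 2, 4);
}

// With `cfg` plus `test`, the whole item disappears from Windows builds, so
// it may freely use other items gated behind `#[cfg(not(windows))]`.
#[cfg(not(windows))]
#[test]
fn absent_on_windows() {
    assert_eq!(2 + 2, 4);
}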