Mirror of https://github.com/astral-sh/ruff.git, synced 2025-09-26 20:10:09 +00:00
Remove criterion/codspeed compat layer (#12524)
Parent 9f72f474e6, commit 71f7aa4971.
11 changed files with 25 additions and 52 deletions.
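With the compat module removed, the benchmark targets import codspeed_criterion_compat directly; because that crate exposes a criterion-compatible API, the call sites barely change. The following is a minimal sketch of the resulting shape of a benchmark target, not code from this commit (the fibonacci workload and all names are purely illustrative):

use codspeed_criterion_compat::{criterion_group, criterion_main, Criterion};
use std::hint::black_box;

// Hypothetical workload; it only exists to give the benchmark something to measure.
fn fibonacci(n: u64) -> u64 {
    match n {
        0 | 1 => 1,
        _ => fibonacci(n - 1) + fibonacci(n - 2),
    }
}

fn benchmark_fibonacci(criterion: &mut Criterion) {
    criterion.bench_function("fibonacci[20]", |b| b.iter(|| fibonacci(black_box(20))));
}

// Groups are registered exactly as with upstream criterion.
criterion_group!(benches, benchmark_fibonacci);
criterion_main!(benches);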
.github/workflows/ci.yaml (2 changed lines)

@@ -616,7 +616,7 @@ jobs:
       - uses: Swatinem/rust-cache@v2
       - name: "Build benchmarks"
-        run: cargo codspeed build --features codspeed -p ruff_benchmark
+        run: cargo codspeed build -p ruff_benchmark
       - name: "Run benchmarks"
         uses: CodSpeedHQ/action@v2
Cargo.lock (generated, 1 changed line)

@@ -2048,7 +2048,6 @@ name = "ruff_benchmark"
 version = "0.0.0"
 dependencies = [
  "codspeed-criterion-compat",
- "criterion",
  "mimalloc",
  "once_cell",
  "red_knot",
Cargo.toml (workspace root)

@@ -58,7 +58,6 @@ console_error_panic_hook = { version = "0.1.7" }
 console_log = { version = "1.0.0" }
 countme = { version = "3.0.1" }
 compact_str = "0.8.0"
-criterion = { version = "0.5.1", default-features = false }
 crossbeam = { version = "0.8.4" }
 dashmap = { version = "6.0.1" }
 drop_bomb = { version = "0.1.5" }
crates/ruff_benchmark/Cargo.toml (2 hunks)

@@ -41,8 +41,7 @@ serde = { workspace = true }
 serde_json = { workspace = true }
 url = { workspace = true }
 ureq = { workspace = true }
-criterion = { workspace = true, default-features = false }
-codspeed-criterion-compat = { workspace = true, default-features = false, optional = true }
+codspeed-criterion-compat = { workspace = true, default-features = false }

 [dev-dependencies]
 ruff_db = { workspace = true }

@@ -56,9 +55,6 @@ red_knot = { workspace = true }
 [lints]
 workspace = true

-[features]
-codspeed = ["codspeed-criterion-compat"]
-
 [target.'cfg(target_os = "windows")'.dev-dependencies]
 mimalloc = { workspace = true }
crates/ruff_benchmark/benches/formatter.rs (1 hunk)

@@ -1,8 +1,9 @@
 use std::path::Path;

-use ruff_benchmark::criterion::{
+use codspeed_criterion_compat::{
     criterion_group, criterion_main, BenchmarkId, Criterion, Throughput,
 };
+
 use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
 use ruff_python_formatter::{format_module_ast, PreviewMode, PyFormatOptions};
 use ruff_python_parser::{parse, Mode};
crates/ruff_benchmark/benches/lexer.rs (1 hunk)

@@ -1,6 +1,7 @@
-use ruff_benchmark::criterion::{
+use codspeed_criterion_compat::{
     criterion_group, criterion_main, measurement::WallTime, BenchmarkId, Criterion, Throughput,
 };
+
 use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
 use ruff_python_parser::{lexer, Mode, TokenKind};
crates/ruff_benchmark/benches/linter.rs (2 hunks)

@@ -1,6 +1,8 @@
-use ruff_benchmark::criterion::{
-    criterion_group, criterion_main, BenchmarkGroup, BenchmarkId, Criterion, Throughput,
+use codspeed_criterion_compat::{
+    self as criterion, criterion_group, criterion_main, BenchmarkGroup, BenchmarkId, Criterion,
+    Throughput,
 };
+use criterion::measurement;
 use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
 use ruff_linter::linter::{lint_only, ParseSource};
 use ruff_linter::rule_selector::PreviewOptions;

@@ -44,7 +46,7 @@ fn create_test_cases() -> Result<Vec<TestCase>, TestFileDownloadError> {
     ])
 }

-fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
+fn benchmark_linter(mut group: BenchmarkGroup<measurement::WallTime>, settings: &LinterSettings) {
     let test_cases = create_test_cases().unwrap();

     for case in test_cases {
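The explicit measurement::WallTime parameter on benchmark_linter is needed because the deleted compat module had aliased BenchmarkGroup<'a> to its wall-time instantiation; once the benchmarks use the compat crate's generic BenchmarkGroup directly, the measurement type has to be spelled out. Below is a self-contained sketch of that pattern with a hypothetical noop benchmark rather than the real linter workload:

use codspeed_criterion_compat::{
    criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion,
};

// Helper mirroring benchmark_linter's shape: the group's measurement type is explicit.
fn run_group(mut group: BenchmarkGroup<WallTime>) {
    group.bench_function("noop", |b| b.iter(|| std::hint::black_box(1 + 1)));
    group.finish();
}

fn benchmark(criterion: &mut Criterion) {
    run_group(criterion.benchmark_group("example"));
}

criterion_group!(benches, benchmark);
criterion_main!(benches);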
crates/ruff_benchmark/benches/parser.rs (1 hunk)

@@ -1,6 +1,7 @@
-use ruff_benchmark::criterion::{
+use codspeed_criterion_compat::{
     criterion_group, criterion_main, measurement::WallTime, BenchmarkId, Criterion, Throughput,
 };
+
 use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
 use ruff_python_ast::statement_visitor::{walk_stmt, StatementVisitor};
 use ruff_python_ast::Stmt;
crates/ruff_benchmark/benches/red_knot.rs (6 hunks)

@@ -1,10 +1,9 @@
 #![allow(clippy::disallowed_names)]

+use codspeed_criterion_compat::{criterion_group, criterion_main, BatchSize, Criterion};
+
 use red_knot::db::RootDatabase;
 use red_knot::workspace::WorkspaceMetadata;
-use ruff_benchmark::criterion::{
-    criterion_group, criterion_main, BatchSize, Criterion, Throughput,
-};
 use ruff_db::files::{system_path_to_file, vendored_path_to_file, File};
 use ruff_db::parsed::parsed_module;
 use ruff_db::program::{ProgramSettings, SearchPathSettings, TargetVersion};

@@ -100,10 +99,7 @@ fn setup_case() -> Case {
 }

 fn benchmark_without_parse(criterion: &mut Criterion) {
-    let mut group = criterion.benchmark_group("red_knot/check_file");
-    group.throughput(Throughput::Bytes(FOO_CODE.len() as u64));
-
-    group.bench_function("red_knot_check_file[without_parse]", |b| {
+    criterion.bench_function("red_knot_check_file[without_parse]", |b| {
         b.iter_batched_ref(
             || {
                 let case = setup_case();

@@ -123,15 +119,10 @@ fn benchmark_without_parse(criterion: &mut Criterion) {
             BatchSize::SmallInput,
         );
     });
-
-    group.finish();
 }

 fn benchmark_incremental(criterion: &mut Criterion) {
-    let mut group = criterion.benchmark_group("red_knot/check_file");
-    group.throughput(Throughput::Bytes(FOO_CODE.len() as u64));
-
-    group.bench_function("red_knot_check_file[incremental]", |b| {
+    criterion.bench_function("red_knot_check_file[incremental]", |b| {
         b.iter_batched_ref(
             || {
                 let mut case = setup_case();

@@ -156,15 +147,10 @@ fn benchmark_incremental(criterion: &mut Criterion) {
             BatchSize::SmallInput,
         );
     });
-
-    group.finish();
 }

 fn benchmark_cold(criterion: &mut Criterion) {
-    let mut group = criterion.benchmark_group("red_knot/check_file");
-    group.throughput(Throughput::Bytes(FOO_CODE.len() as u64));
-
-    group.bench_function("red_knot_check_file[cold]", |b| {
+    criterion.bench_function("red_knot_check_file[cold]", |b| {
         b.iter_batched_ref(
             setup_case,
             |case| {

@@ -176,11 +162,12 @@ fn benchmark_cold(criterion: &mut Criterion) {
             BatchSize::SmallInput,
         );
     });
-
-    group.finish();
 }

-criterion_group!(cold, benchmark_cold);
-criterion_group!(without_parse, benchmark_without_parse);
-criterion_group!(incremental, benchmark_incremental);
-criterion_main!(without_parse, cold, incremental);
+criterion_group!(
+    check_file,
+    benchmark_cold,
+    benchmark_without_parse,
+    benchmark_incremental
+);
+criterion_main!(check_file);
crates/ruff_benchmark/src/criterion.rs (deleted)

@@ -1,11 +0,0 @@
-//! This module re-exports the criterion API but picks the right backend depending on whether
-//! the benchmarks are built to run locally or with codspeed
-
-#[cfg(not(codspeed))]
-pub use criterion::*;
-
-#[cfg(not(codspeed))]
-pub type BenchmarkGroup<'a> = criterion::BenchmarkGroup<'a, measurement::WallTime>;
-
-#[cfg(codspeed)]
-pub use codspeed_criterion_compat::*;
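The deleted module was a small compile-time facade: it re-exported one of two API-compatible backends behind a cfg switch, so every benchmark could write use ruff_benchmark::criterion::... without naming the backend. A self-contained sketch of the same re-export technique, using only the standard library and the built-in debug_assertions cfg as a stand-in for the codspeed flag:

// Illustrative only: callers always import map::Map, and the backend is chosen
// at compile time. In the deleted module the switch was cfg(codspeed) and the
// backends were the criterion and codspeed_criterion_compat crates.
mod map {
    #[cfg(debug_assertions)]
    pub use std::collections::HashMap as Map;

    #[cfg(not(debug_assertions))]
    pub use std::collections::BTreeMap as Map;
}

fn main() {
    // Downstream code compiles against a single path regardless of the backend.
    let mut lookup = map::Map::new();
    lookup.insert("answer", 42);
    println!("{:?}", lookup.get("answer"));
}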
crates/ruff_benchmark/src/lib.rs (1 hunk)

@@ -1,5 +1,3 @@
-pub mod criterion;
-
 use std::fmt::{Display, Formatter};
 use std::path::PathBuf;
 use std::process::Command;