Mirror of https://github.com/denoland/deno.git, synced 2025-12-23 08:48:24 +00:00
ci: upload test results as artifacts (#31686)
This will allow us to track slow and flaky tests.
This commit is contained in:
  parent 3fbb1daddb
  commit 0a162aaecc

10 changed files with 400 additions and 22 deletions

.github/workflows/ci.generate.ts (vendored), 25 changes
@@ -982,6 +982,29 @@ const ci = {
            "fi",
          ].join("\n"),
        },
+        {
+          name: "Combine test results",
+          if: [
+            "always() &&",
+            "matrix.job == 'test' &&",
+            "!startsWith(github.ref, 'refs/tags/')",
+          ].join("\n"),
+          run: "deno run -RWN ./tools/combine_test_results.js",
+        },
+        {
+          name: "Upload test results",
+          uses: "actions/upload-artifact@v4",
+          if: [
+            "always() &&",
+            "matrix.job == 'test' &&",
+            "!startsWith(github.ref, 'refs/tags/')",
+          ].join("\n"),
+          with: {
+            name:
+              "test-results-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}.json",
+            path: "target/test_results.json",
+          },
+        },
        {
          name: "Configure hosts file for WPT",
          if: "matrix.wpt",
@@ -1058,7 +1081,7 @@ const ci = {
          run: "cargo bench --locked",
        },
        {
-          name: "Post Benchmarks",
+          name: "Post benchmarks",
          if: [
            "matrix.job == 'bench' &&",
            "github.repository == 'denoland/deno' &&",

.github/workflows/ci.yml (vendored), 17 changes
@@ -583,6 +583,21 @@ jobs:
      - name: Ensure no git changes
        if: '!(matrix.skip) && (matrix.job == ''test'' && github.event_name == ''pull_request'')'
        run: "if [[ -n \"$(git status --porcelain)\" ]]; then\necho \"❌ Git working directory is dirty. Ensure `cargo test` is not modifying git tracked files.\"\necho \"\"\necho \"\U0001F4CB Status:\"\ngit status\necho \"\"\nexit 1\nfi"
+      - name: Combine test results
+        if: |-
+          !(matrix.skip) && (always() &&
+          matrix.job == 'test' &&
+          !startsWith(github.ref, 'refs/tags/'))
+        run: deno run -RWN ./tools/combine_test_results.js
+      - name: Upload test results
+        uses: actions/upload-artifact@v4
+        if: |-
+          !(matrix.skip) && (always() &&
+          matrix.job == 'test' &&
+          !startsWith(github.ref, 'refs/tags/'))
+        with:
+          name: 'test-results-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}.json'
+          path: target/test_results.json
      - name: Configure hosts file for WPT
        if: '!(matrix.skip) && (matrix.wpt)'
        run: ./wpt make-hosts-file | sudo tee -a /etc/hosts
@@ -637,7 +652,7 @@ jobs:
      - name: Run benchmarks
        if: '!(matrix.skip) && (matrix.job == ''bench'' && !startsWith(github.ref, ''refs/tags/''))'
        run: cargo bench --locked
-      - name: Post Benchmarks
+      - name: Post benchmarks
        if: |-
          !(matrix.skip) && (matrix.job == 'bench' &&
          github.repository == 'denoland/deno' &&

Cargo.lock (generated), 4 changes
@@ -4055,9 +4055,9 @@ checksum = "c007b1ae3abe1cb6f85a16305acd418b7ca6343b953633fee2b76d8f108b830f"

 [[package]]
 name = "file_test_runner"
-version = "0.11.0"
+version = "0.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e11cf3ae4bdb8b7174c189f7480a0b94cbc51b77b101bef41fb0ac968bdb313"
+checksum = "f10931d8b801120f1248672b54a47edd8776fdd8bc92c1598de5aeb01ee6ffec"
 dependencies = [
  "anyhow",
  "crossbeam-channel",

@@ -172,7 +172,7 @@ error_reporter = "1"
 fast-socks5 = "0.9.6"
 faster-hex = "0.10.0"
 fastwebsockets = { version = "0.8", features = ["upgrade", "unstable-split"] }
-file_test_runner = "0.11.0"
+file_test_runner = "0.11.1"
 filetime = "0.2.16"
 flate2 = { version = "1.0.30", default-features = false }
 fqdn = "0.4.6"

@@ -3,12 +3,14 @@
 use std::num::NonZeroUsize;
 use std::panic::AssertUnwindSafe;
 use std::path::PathBuf;
+use std::sync::Arc;

 use file_test_runner::RunOptions;
 use file_test_runner::TestResult;
 use file_test_runner::collection::CollectedTest;
 use file_test_runner::collection::CollectedTestCategory;
 use test_util::TestMacroCase;
+use test_util::test_runner::FlakyTestTracker;
 use test_util::test_runner::Parallelism;
 use test_util::test_runner::run_maybe_flaky_test;

@@ -81,6 +83,7 @@ pub fn main() {
   }

   let run_test = move |test: &CollectedTest<&'static TestMacroCase>,
+                       flaky_test_tracker: &FlakyTestTracker,
                        parallelism: Option<&Parallelism>| {
     if test.data.ignore {
       return TestResult::Ignored;
@@ -118,6 +121,7 @@ pub fn main() {
     run_maybe_flaky_test(
       &test.name,
       test.data.flaky || *test_util::IS_CI,
+      flaky_test_tracker,
       parallelism,
       run_test,
     )
@@ -127,14 +131,21 @@ pub fn main() {
     main_category.partition(|t| t.name.contains("::watcher::"));

   // watcher tests are really flaky, so run them sequentially
-  let reporter = test_util::test_runner::get_test_reporter();
+  let flaky_test_tracker = Arc::new(FlakyTestTracker::default());
+  let reporter = test_util::test_runner::get_test_reporter(
+    "integration",
+    flaky_test_tracker.clone(),
+  );
   file_test_runner::run_tests(
     &watcher_tests,
     RunOptions {
       parallelism: NonZeroUsize::new(1).unwrap(),
       reporter: reporter.clone(),
     },
-    move |test| run_test(test, None),
+    {
+      let flaky_test_tracker = flaky_test_tracker.clone();
+      move |test| run_test(test, &flaky_test_tracker, None)
+    },
   );
   let parallelism = Parallelism::default();
   file_test_runner::run_tests(
@@ -143,6 +154,6 @@ pub fn main() {
       parallelism: parallelism.max_parallelism(),
       reporter: reporter.clone(),
     },
-    move |test| run_test(test, Some(&parallelism)),
+    move |test| run_test(test, &flaky_test_tracker, Some(&parallelism)),
   );
 }
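
Side note on the closure changes above: the first run_tests call receives a block expression that clones the Arc before the `move` closure captures it, which leaves the original `flaky_test_tracker` free to be captured by the second call. A minimal standalone sketch of that capture pattern, using stand-in types rather than the real runner API:

use std::sync::Arc;

struct Tracker; // stand-in for the Arc<FlakyTestTracker> pointee

fn run_tests(run: impl Fn(&str)) {
  // stand-in for file_test_runner::run_tests
  for name in ["watcher::a", "other::b"] {
    run(name);
  }
}

fn main() {
  let tracker = Arc::new(Tracker);

  // Clone inside a block, then move the clone into the closure.
  run_tests({
    let tracker = tracker.clone();
    move |test| {
      let _ = (&tracker, test);
    }
  });

  // The original Arc was not moved above, so this closure can own it.
  run_tests(move |test| {
    let _ = (&tracker, test);
  });
}
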

@@ -8,6 +8,7 @@ use std::collections::HashSet;
 use std::panic::AssertUnwindSafe;
 use std::path::Path;
 use std::rc::Rc;
+use std::sync::Arc;

 use anyhow::Context;
 use file_test_runner::NO_CAPTURE;
@@ -24,6 +25,7 @@ use serde::Deserialize;
 use test_util::IS_CI;
 use test_util::PathRef;
 use test_util::TestContextBuilder;
+use test_util::test_runner::FlakyTestTracker;
 use test_util::test_runner::Parallelism;
 use test_util::test_runner::run_maybe_flaky_test;
 use test_util::tests_path;
@@ -263,18 +265,23 @@ pub fn main() {

   let _http_guard = test_util::http_server();
   let parallelism = Parallelism::default();
+  let flaky_test_tracker = Arc::new(FlakyTestTracker::default());
   file_test_runner::run_tests(
     &root_category,
     file_test_runner::RunOptions {
       parallelism: parallelism.max_parallelism(),
-      reporter: test_util::test_runner::get_test_reporter(),
+      reporter: test_util::test_runner::get_test_reporter(
+        "specs",
+        flaky_test_tracker.clone(),
+      ),
     },
-    move |test| run_test(test, &parallelism),
+    move |test| run_test(test, &flaky_test_tracker, &parallelism),
   );
 }

 fn run_test(
   test: &CollectedTest<serde_json::Value>,
+  flaky_test_tracker: &FlakyTestTracker,
   parallelism: &Parallelism,
 ) -> TestResult {
   let cwd = PathRef::new(&test.path).parent();
@@ -296,6 +303,7 @@ fn run_test(
       &metadata,
       &cwd,
       diagnostic_logger.clone(),
+      flaky_test_tracker,
       parallelism,
     );
     if result.is_failed() {
@@ -309,6 +317,7 @@ fn run_test(
       &metadata,
       &cwd,
       diagnostic_logger.clone(),
+      flaky_test_tracker,
       parallelism,
     )
   }
@@ -334,6 +343,7 @@ fn run_test_inner(
   metadata: &MultiStepMetaData,
   cwd: &PathRef,
   diagnostic_logger: Rc<RefCell<Vec<u8>>>,
+  flaky_test_tracker: &FlakyTestTracker,
   parallelism: &Parallelism,
 ) -> TestResult {
   let run_fn = || {
@@ -350,7 +360,13 @@ fn run_test_inner(
         TestResult::Passed { duration: None }
       }))
     };
-    let result = run_maybe_flaky_test(&test.name, step.flaky, None, run_func);
+    let result = run_maybe_flaky_test(
+      &test.name,
+      step.flaky,
+      flaky_test_tracker,
+      None,
+      run_func,
+    );
     if result.is_failed() {
       return result;
     }
@@ -360,6 +376,7 @@ fn run_test_inner(
   run_maybe_flaky_test(
     &test.name,
     metadata.flaky || *IS_CI,
+    flaky_test_tracker,
     Some(parallelism),
     run_fn,
   )
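
One detail worth noting in the hunk above: run_test takes &FlakyTestTracker, while the closure owns an Arc<FlakyTestTracker>, so `&flaky_test_tracker` is really &Arc<FlakyTestTracker>; deref coercion is what makes the call compile. A small sketch with a stand-in type (not the real tracker):

use std::sync::Arc;

struct Tracker {
  label: &'static str,
}

fn use_tracker(t: &Tracker) -> &'static str {
  t.label
}

fn main() {
  let shared = Arc::new(Tracker { label: "specs" });
  // &Arc<Tracker> coerces to &Tracker because Arc<T>: Deref<Target = T>.
  assert_eq!(use_tracker(&shared), "specs");
}
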

@@ -1,5 +1,7 @@
 // Copyright 2018-2025 the Deno authors. MIT license.

+use std::sync::Arc;
+
 use file_test_runner::RunOptions;
 use file_test_runner::TestResult;
 use file_test_runner::collection::CollectOptions;
@@ -8,6 +10,7 @@ use file_test_runner::collection::collect_tests_or_exit;
 use file_test_runner::collection::strategies::TestPerFileCollectionStrategy;
 use test_util as util;
 use test_util::TestContextBuilder;
+use test_util::test_runner::FlakyTestTracker;
 use test_util::test_runner::Parallelism;
 use test_util::test_runner::flaky_test_ci;
 use test_util::tests_path;
@@ -24,15 +27,21 @@ fn main() {
     return; // no tests to run for the filter
   }
   let parallelism = Parallelism::default();
+  let flaky_test_tracker = Arc::new(FlakyTestTracker::default());
   let _g = util::http_server();
   file_test_runner::run_tests(
     &category,
     RunOptions {
       parallelism: parallelism.max_parallelism(),
-      reporter: test_util::test_runner::get_test_reporter(),
+      reporter: test_util::test_runner::get_test_reporter(
+        "unit",
+        flaky_test_tracker.clone(),
+      ),
     },
     move |test| {
-      flaky_test_ci(&test.name, Some(&parallelism), || run_test(test))
+      flaky_test_ci(&test.name, &flaky_test_tracker, Some(&parallelism), || {
+        run_test(test)
+      })
     },
   )
 }

@@ -1,6 +1,7 @@
 // Copyright 2018-2025 the Deno authors. MIT license.

 use std::num::NonZeroUsize;
+use std::sync::Arc;

 use file_test_runner::RunOptions;
 use file_test_runner::TestResult;
@@ -9,6 +10,7 @@ use file_test_runner::collection::CollectedTest;
 use file_test_runner::collection::collect_tests_or_exit;
 use file_test_runner::collection::strategies::TestPerFileCollectionStrategy;
 use test_util as util;
+use test_util::test_runner::FlakyTestTracker;
 use test_util::test_runner::Parallelism;
 use test_util::test_runner::flaky_test_ci;
 use test_util::tests_path;
@@ -27,19 +29,31 @@ fn main() {
     return;
   }
   let parallelism = Parallelism::default();
+  let flaky_test_tracker = Arc::new(FlakyTestTracker::default());
   let _g = util::http_server();
   // Run the crypto category tests separately without concurrency because they run in Deno with --parallel
   let (crypto_category, category) =
     category.partition(|test| test.name.contains("::crypto::"));
-  let reporter = test_util::test_runner::get_test_reporter();
+  let reporter = test_util::test_runner::get_test_reporter(
+    "unit_node",
+    flaky_test_tracker.clone(),
+  );
   file_test_runner::run_tests(
     &category,
     RunOptions {
       parallelism: parallelism.max_parallelism(),
       reporter: reporter.clone(),
     },
-    move |test| {
-      flaky_test_ci(&test.name, Some(&parallelism), || run_test(test))
+    {
+      let flaky_test_tracker = flaky_test_tracker.clone();
+      move |test| {
+        flaky_test_ci(
+          &test.name,
+          &flaky_test_tracker,
+          Some(&parallelism),
+          || run_test(test),
+        )
+      }
     },
   );
   file_test_runner::run_tests(
@@ -48,7 +62,9 @@ fn main() {
       parallelism: NonZeroUsize::new(1).unwrap(),
       reporter: reporter.clone(),
     },
-    move |test| flaky_test_ci(&test.name, None, || run_test(test)),
+    move |test| {
+      flaky_test_ci(&test.name, &flaky_test_tracker, None, || run_test(test))
+    },
   );
 }

@@ -1,5 +1,6 @@
 // Copyright 2018-2025 the Deno authors. MIT license.

+use std::collections::HashMap;
 use std::io::IsTerminal;
 use std::io::Write;
 use std::num::NonZeroUsize;
@@ -14,17 +15,50 @@ use file_test_runner::RunOptions;
 use file_test_runner::TestResult;
 use file_test_runner::reporter::LogReporter;
 use parking_lot::Mutex;
+use serde::Serialize;

 use crate::IS_CI;
 use crate::colors;
 use crate::semaphore::Semaphore;

+/// Tracks the number of times each test has been flaky
+pub struct FlakyTestTracker {
+  flaky_counts: Mutex<HashMap<String, usize>>,
+}
+
+impl FlakyTestTracker {
+  pub fn record_flaky(&self, test_name: &str) {
+    let mut counts = self.flaky_counts.lock();
+    *counts.entry(test_name.to_string()).or_insert(0) += 1;
+  }
+
+  pub fn get_count(&self, test_name: &str) -> usize {
+    let counts = self.flaky_counts.lock();
+    counts.get(test_name).copied().unwrap_or(0)
+  }
+}
+
+impl Default for FlakyTestTracker {
+  fn default() -> Self {
+    Self {
+      flaky_counts: Mutex::new(HashMap::new()),
+    }
+  }
+}
+
 pub fn flaky_test_ci(
   test_name: &str,
+  flaky_test_tracker: &FlakyTestTracker,
   parallelism: Option<&Parallelism>,
   run_test: impl Fn() -> TestResult,
 ) -> TestResult {
-  run_maybe_flaky_test(test_name, *IS_CI, parallelism, run_test)
+  run_maybe_flaky_test(
+    test_name,
+    *IS_CI,
+    flaky_test_tracker,
+    parallelism,
+    run_test,
+  )
 }

 struct SingleConcurrencyFlagGuard<'a>(&'a Parallelism);
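
A usage sketch of the tracker introduced above. It mirrors the added struct but swaps parking_lot::Mutex for std::sync::Mutex so the snippet compiles on its own; the test names are made up:

use std::collections::HashMap;
use std::sync::Mutex;

#[derive(Default)]
struct FlakyTestTracker {
  flaky_counts: Mutex<HashMap<String, usize>>,
}

impl FlakyTestTracker {
  fn record_flaky(&self, test_name: &str) {
    // std Mutex returns a Result, unlike the parking_lot Mutex used in the real code.
    let mut counts = self.flaky_counts.lock().unwrap();
    *counts.entry(test_name.to_string()).or_insert(0) += 1;
  }

  fn get_count(&self, test_name: &str) -> usize {
    self.flaky_counts.lock().unwrap().get(test_name).copied().unwrap_or(0)
  }
}

fn main() {
  let tracker = FlakyTestTracker::default();
  tracker.record_flaky("integration::watcher::bundle");
  tracker.record_flaky("integration::watcher::bundle");
  assert_eq!(tracker.get_count("integration::watcher::bundle"), 2);
  assert_eq!(tracker.get_count("unit::never_retried"), 0);
}
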

@@ -80,6 +114,7 @@ impl Parallelism {
 pub fn run_maybe_flaky_test(
   test_name: &str,
   is_flaky: bool,
+  flaky_test_tracker: &FlakyTestTracker,
   parallelism: Option<&Parallelism>,
   main_action: impl Fn() -> TestResult,
 ) -> TestResult {
@@ -93,6 +128,7 @@ pub fn run_maybe_flaky_test(
   if !result.is_failed() {
     return result;
   }
+  flaky_test_tracker.record_flaky(test_name);
   #[allow(clippy::print_stderr)]
   if *IS_CI {
     ::std::eprintln!(
@@ -120,6 +156,7 @@ pub fn run_maybe_flaky_test(
     if !result.is_failed() {
       return result;
     }
+    flaky_test_tracker.record_flaky(test_name);
     std::thread::sleep(Duration::from_millis(100));
   }
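
The two record_flaky calls above sit on the failure paths of run_maybe_flaky_test: each failed attempt bumps the per-test counter before the runner retries (or gives up), which is what later surfaces as flakyCount in the JSON report. A simplified illustration of that control flow, with stand-in types and made-up test names, not the real function:

use std::collections::HashMap;
use std::sync::Mutex;
use std::time::Duration;

#[derive(Default)]
struct Tracker(Mutex<HashMap<String, usize>>);

impl Tracker {
  fn record_flaky(&self, name: &str) {
    *self.0.lock().unwrap().entry(name.to_string()).or_default() += 1;
  }
}

// Returns true if any attempt passed. Each failed attempt is recorded so a
// report can later show how often the test had to be retried.
fn run_with_retries(
  name: &str,
  attempts: usize,
  tracker: &Tracker,
  run: impl Fn() -> bool,
) -> bool {
  for i in 0..attempts {
    if run() {
      return true;
    }
    tracker.record_flaky(name);
    if i + 1 < attempts {
      std::thread::sleep(Duration::from_millis(100));
    }
  }
  false
}

fn main() {
  let tracker = Tracker::default();
  let calls = std::cell::Cell::new(0);
  // A fake test that fails on its first attempt and then passes.
  let passed = run_with_retries("unit::sometimes_flaky", 3, &tracker, || {
    calls.set(calls.get() + 1);
    calls.get() > 1
  });
  assert!(passed);
  assert_eq!(tracker.0.lock().unwrap()["unit::sometimes_flaky"], 1);
}
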

@@ -173,16 +210,240 @@ pub fn with_timeout(
   TestTimeoutHolder { _tx: tx }
 }

-pub fn get_test_reporter<TData>()
--> Arc<dyn file_test_runner::reporter::Reporter<TData>> {
+#[derive(Debug, Clone, Serialize)]
+#[serde(rename_all = "camelCase")]
+struct RecordedTestResult {
+  name: String,
+  path: String,
+  #[serde(skip_serializing_if = "Option::is_none")]
+  duration: Option<u128>,
+  #[serde(skip_serializing_if = "is_false")]
+  failed: bool,
+  #[serde(skip_serializing_if = "is_false")]
+  ignored: bool,
+  #[serde(skip_serializing_if = "is_zero")]
+  flaky_count: usize,
+  #[serde(skip_serializing_if = "Vec::is_empty")]
+  sub_tests: Vec<RecordedTestResult>,
+}
+
+fn is_false(value: &bool) -> bool {
+  !value
+}
+
+fn is_zero(value: &usize) -> bool {
+  *value == 0
+}
+
+#[derive(Default, Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+struct RecordedReport {
+  tests: Vec<RecordedTestResult>,
+}
+
+struct JsonReporter {
+  data: Arc<Mutex<RecordedReport>>,
+  flaky_tracker: Arc<FlakyTestTracker>,
+  test_module_name: String,
+}
+
+impl JsonReporter {
+  pub fn new(
+    flaky_tracker: Arc<FlakyTestTracker>,
+    test_module_name: String,
+  ) -> Self {
+    Self {
+      data: Default::default(),
+      flaky_tracker,
+      test_module_name,
+    }
+  }
+
+  fn write_report_to_file(&self) {
+    let json = {
+      let data = self.data.lock();
+      serde_json::to_string(&*data).unwrap()
+    };
+    let file_path = crate::root_path()
+      .join("target")
+      .join(format!("test_results_{}.json", self.test_module_name));
+
+    file_path.write(json);
+  }
+
+  fn flatten_and_record_test(
+    &self,
+    tests: &mut Vec<RecordedTestResult>,
+    test_name: String,
+    path: String,
+    result: &TestResult,
+    main_duration: Option<Duration>,
+  ) {
+    match result {
+      TestResult::SubTests {
+        sub_tests,
+        duration,
+      } => {
+        let mut sub_test_results = Vec::with_capacity(sub_tests.len());
+        for sub_test in sub_tests {
+          let full_name = format!("{}::{}", test_name, sub_test.name);
+          self.flatten_and_record_test(
+            &mut sub_test_results,
+            full_name,
+            path.clone(),
+            &sub_test.result,
+            None,
+          );
+        }
+        let flaky_count = self.flaky_tracker.get_count(&test_name);
+        tests.push(RecordedTestResult {
+          name: test_name,
+          path,
+          duration: duration.or(main_duration).map(|d| d.as_millis()),
+          failed: sub_tests.iter().any(|s| s.result.is_failed()),
+          ignored: false,
+          flaky_count,
+          sub_tests: sub_test_results,
+        })
+      }
+      TestResult::Passed { duration } => {
+        let flaky_count = self.flaky_tracker.get_count(&test_name);
+        let test_result = RecordedTestResult {
+          name: test_name,
+          path,
+          duration: duration.or(main_duration).map(|d| d.as_millis()),
+          failed: false,
+          ignored: false,
+          flaky_count,
+          sub_tests: Vec::new(),
+        };
+        tests.push(test_result);
+      }
+      TestResult::Failed { duration, .. } => {
+        let flaky_count = self.flaky_tracker.get_count(&test_name);
+        let test_result = RecordedTestResult {
+          name: test_name,
+          path,
+          duration: duration.or(main_duration).map(|d| d.as_millis()),
+          failed: true,
+          ignored: false,
+          flaky_count,
+          sub_tests: Vec::new(),
+        };
+        tests.push(test_result.clone());
+      }
+      TestResult::Ignored => {
+        let flaky_count = self.flaky_tracker.get_count(&test_name);
+        let test_result = RecordedTestResult {
+          name: test_name,
+          path,
+          duration: None,
+          failed: false,
+          ignored: true,
+          flaky_count,
+          sub_tests: Vec::new(),
+        };
+        tests.push(test_result);
+      }
+    }
+  }
+}
+
+impl<TData> file_test_runner::reporter::Reporter<TData> for JsonReporter {
+  fn report_category_start(
+    &self,
+    _category: &file_test_runner::collection::CollectedTestCategory<TData>,
+    _context: &file_test_runner::reporter::ReporterContext,
+  ) {
+  }
+
+  fn report_category_end(
+    &self,
+    _category: &file_test_runner::collection::CollectedTestCategory<TData>,
+    _context: &file_test_runner::reporter::ReporterContext,
+  ) {
+  }
+
+  fn report_test_start(
+    &self,
+    _test: &file_test_runner::collection::CollectedTest<TData>,
+    _context: &file_test_runner::reporter::ReporterContext,
+  ) {
+  }
+
+  fn report_test_end(
+    &self,
+    test: &file_test_runner::collection::CollectedTest<TData>,
+    duration: Duration,
+    result: &TestResult,
+    _context: &file_test_runner::reporter::ReporterContext,
+  ) {
+    let mut data = self.data.lock();
+
+    let relative_path = test
+      .path
+      .strip_prefix(crate::root_path())
+      .unwrap_or(&test.path);
+    let path = match test.line_and_column {
+      Some((line, col)) => {
+        format!("{}:{}:{}", relative_path.display(), line + 1, col + 1)
+      }
+      None => relative_path.display().to_string(),
+    }
+    .replace("\\", "/");
+
+    // Use the helper function to recursively flatten subtests
+    self.flatten_and_record_test(
+      &mut data.tests,
+      test.name.to_string(),
+      path,
+      result,
+      Some(duration),
+    );
+  }
+
+  fn report_failures(
+    &self,
+    _failures: &[file_test_runner::reporter::ReporterFailure<TData>],
+    _total_tests: usize,
+  ) {
+    // Write the report to file when failures are reported (at the end of test run)
+    self.write_report_to_file();
+  }
+}
+
+pub trait ReporterData {
+  fn times_flaky() -> usize;
+}
+
+pub fn get_test_reporter<TData: 'static>(
+  test_module_name: &str,
+  flaky_test_tracker: Arc<FlakyTestTracker>,
+) -> Arc<dyn file_test_runner::reporter::Reporter<TData>> {
+  let mut reporters: Vec<Box<dyn file_test_runner::reporter::Reporter<TData>>> =
+    Vec::with_capacity(2);
+  reporters.push(get_display_reporter());
+  if *IS_CI {
+    reporters.push(Box::new(JsonReporter::new(
+      flaky_test_tracker,
+      test_module_name.to_string(),
+    )));
+  }
+  Arc::new(file_test_runner::reporter::AggregateReporter::new(
+    reporters,
+  ))
+}
+
+fn get_display_reporter<TData>()
+-> Box<dyn file_test_runner::reporter::Reporter<TData>> {
   if *file_test_runner::NO_CAPTURE
     || *IS_CI
     || !std::io::stderr().is_terminal()
     || std::env::var("DENO_TEST_UTIL_REPORTER").ok().as_deref() == Some("log")
   {
-    Arc::new(file_test_runner::reporter::LogReporter::default())
+    Box::new(file_test_runner::reporter::LogReporter::default())
   } else {
-    Arc::new(PtyReporter::new())
+    Box::new(PtyReporter::new())
   }
 }
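
Given the serde attributes above (camelCase renaming, with fields skipped when false, zero, empty, or None), the file written to target/test_results_<module>.json has roughly the shape below. A sketch built with serde_json::json! purely for illustration; the test names, paths, and numbers are invented:

fn main() {
  let example = serde_json::json!({
    "tests": [
      {
        // A passing test: failed/ignored/flakyCount/subTests are omitted.
        "name": "unit::fetch_basic",
        "path": "tests/unit/fetch_test.ts:12:1",
        "duration": 412
      },
      {
        // A test that needed retries and ultimately failed.
        "name": "integration::watcher::bundle",
        "path": "tests/integration/watcher_tests.rs:88:1",
        "duration": 9031,
        "failed": true,
        "flakyCount": 2
      }
    ]
  });
  println!("{}", serde_json::to_string_pretty(&example).unwrap());
}

tools/combine_test_results.js (added below) then concatenates the per-module tests arrays into a single target/test_results.json for upload.
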

tools/combine_test_results.js (new file), 26 changes
@@ -0,0 +1,26 @@
+// Copyright 2018-2025 the Deno authors. MIT license.
+
+import { join } from "@std/path";
+import { ROOT_PATH } from "./util.js";
+
+const joinTarget = (fileName) => join(ROOT_PATH, "target", fileName);
+const filePaths = [
+  "test_results_integration.json",
+  "test_results_specs.json",
+  "test_results_unit.json",
+  "test_results_unit_node.json",
+].map((fileName) => joinTarget(fileName));
+
+const tests = [];
+for (const filePath of filePaths) {
+  try {
+    tests.push(...JSON.parse(Deno.readTextFileSync(filePath)).tests);
+  } catch (err) {
+    if (!(err instanceof Deno.errors.NotFound)) {
+      throw err;
+    }
+  }
+}
+
+const combinedFileText = JSON.stringify({ tests });
+Deno.writeTextFileSync(joinTarget("test_results.json"), combinedFileText);
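
Once the uploaded test-results artifact is downloaded from a CI run, a short script can surface the slow and flaky tests the commit description refers to. A sketch in Rust with serde_json, assuming the camelCase field names shown earlier; nothing here is part of the commit:

use std::fs;

fn main() -> Result<(), Box<dyn std::error::Error>> {
  let text = fs::read_to_string("target/test_results.json")?;
  let report: serde_json::Value = serde_json::from_str(&text)?;
  let tests = report["tests"].as_array().cloned().unwrap_or_default();

  // Tests that needed at least one retry.
  for t in &tests {
    let flaky = t["flakyCount"].as_u64().unwrap_or(0);
    if flaky > 0 {
      println!("flaky: {} ({}x)", t["name"].as_str().unwrap_or("?"), flaky);
    }
  }

  // The ten slowest tests by recorded duration (milliseconds).
  let mut by_duration: Vec<(u64, String)> = tests
    .iter()
    .filter_map(|t| Some((t["duration"].as_u64()?, t["name"].as_str()?.to_string())))
    .collect();
  by_duration.sort_unstable_by(|a, b| b.0.cmp(&a.0));
  for (ms, name) in by_duration.into_iter().take(10) {
    println!("slow: {name} ({ms} ms)");
  }
  Ok(())
}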