Mirror of https://github.com/rust-lang/rust-analyzer.git
Parse cargo output a line at a time.
We previously used serde's stream deserializer to read JSON blobs from the cargo output. It has an issue, though: if the deserializer encounters invalid input, it gets stuck reporting the same error again and again, because it cannot skip forward over the bad input until it reaches the next valid object. Reading a line at a time and deserializing each line manually fixes this, because cargo makes sure to output only one JSON blob per line; should we encounter invalid input, we can simply skip that line and continue. The main cause of such input is stray printf-debugging in procedural macros, so we still report that an error occurred, but we now handle it gracefully. Fixes #2935
Parent: 6fd29651b4
Commit: 8ffbe86dfd
2 changed files with 22 additions and 5 deletions
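
To make the recovery idea concrete, here is a minimal, self-contained sketch (not the rust-analyzer code itself) of line-at-a-time parsing: one line of cargo's JSON output has been corrupted by stray printf-debugging from a procedural macro, and the loop skips only that line. The sample input is invented and only serde_json is assumed as a dependency.

    use serde_json::Value;

    fn main() {
        // Three lines as cargo might emit them, with stray proc-macro
        // debug output in the middle of the stream.
        let cargo_output = concat!(
            "{\"reason\":\"compiler-artifact\",\"target\":{\"name\":\"foo\"}}\n",
            "stray debug print from a proc macro, not JSON\n",
            "{\"reason\":\"compiler-message\",\"message\":{\"rendered\":\"warning\"}}\n",
        );

        for line in cargo_output.lines() {
            match serde_json::from_str::<Value>(line) {
                Ok(message) => println!("parsed message: {}", message["reason"]),
                // A bad line costs us only that line; the next one parses
                // normally. A stream deserializer cannot advance past the
                // invalid input and keeps reporting the same error.
                Err(err) => eprintln!("skipping invalid line ({}): {}", err, line),
            }
        }
    }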
@@ -11,6 +11,7 @@ log = "0.4.3"
 cargo_metadata = "0.9.1"
 jod-thread = "0.1.0"
 parking_lot = "0.10.0"
+serde_json = "1.0.45"
 
 [dev-dependencies]
 insta = "0.13.0"
@@ -9,7 +9,7 @@ use lsp_types::{
 };
 use std::{
     collections::HashMap,
-    io::BufReader,
+    io::{BufRead, BufReader},
     path::PathBuf,
     process::{Command, Stdio},
     sync::Arc,
@@ -350,13 +350,29 @@ impl WatchThread
             // which will break out of the loop, and continue the shutdown
             let _ = message_send.send(CheckEvent::Begin);
 
-            for message in
-                cargo_metadata::parse_messages(BufReader::new(command.stdout.take().unwrap()))
-            {
+            // We manually read a line at a time, instead of using serde's
+            // stream deserializers, because the deserializer cannot recover
+            // from an error, resulting in it getting stuck, because we try to
+            // be resilient against failures.
+            //
+            // Because cargo only outputs one JSON object per line, we can
+            // simply skip a line if it doesn't parse, which just ignores any
+            // erroneous output.
+            let stdout = BufReader::new(command.stdout.take().unwrap());
+            for line in stdout.lines() {
+                let line = match line {
+                    Ok(line) => line,
+                    Err(err) => {
+                        log::error!("Couldn't read line from cargo: {:?}", err);
+                        continue;
+                    }
+                };
+
+                let message = serde_json::from_str::<cargo_metadata::Message>(&line);
                 let message = match message {
                     Ok(message) => message,
                     Err(err) => {
-                        log::error!("Invalid json from cargo check, ignoring: {}", err);
+                        log::error!("Invalid json from cargo check, ignoring ({}): {}", err, line);
                         continue;
                     }
                 };
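For completeness, a hedged sketch of what typically happens to a message once it has parsed: the match arms assume the Message variants and fields exposed by cargo_metadata 0.9 (CompilerArtifact with a target name, CompilerMessage wrapping a rustc diagnostic), and the CheckEvent channel plumbing from WatchThread is replaced by plain printing.

    use cargo_metadata::Message;

    // Hypothetical consumer of one successfully parsed line; rust-analyzer
    // itself forwards these over a channel rather than printing them.
    fn handle_message(message: Message) {
        match message {
            // A finished compilation unit (library, binary, ...).
            Message::CompilerArtifact(artifact) => {
                println!("built {}", artifact.target.name);
            }
            // A diagnostic emitted by rustc, e.g. a warning or an error.
            Message::CompilerMessage(msg) => {
                println!("diagnostic: {}", msg.message.message);
            }
            // Build-script runs and unknown message kinds are ignored here.
            _ => {}
        }
    }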