mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-28 12:54:58 +00:00
Auto merge of #17640 - Veykril:parse-dyn, r=Veykril
Parse contextual dyn keyword properly in edition 2015

Turns out this is more important than I thought it would be given the metrics :)
commit 2c2b6c9bcc
15 changed files with 180 additions and 23 deletions
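In edition 2015, `dyn` is only a contextual keyword: the same token may begin a trait-object type or be an ordinary path segment, so the parser has to decide from the token that follows. The inline test added in this diff exercises exactly that ambiguity; the lines below illustrate it in the same spirit (parser-level examples, not meant to compile, since `Foo` and `'a` are unbound placeholders):

    type A = dyn Iterator<Item = Foo<'a>> + 'a; // `dyn` starts a trait-object type
    type B = dyn::Path;                          // `dyn` is the first path segment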
@@ -18,7 +18,7 @@ use crate::{
     name, quote,
     quote::dollar_crate,
     tt::{self, DelimSpan},
-    ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroFileIdExt,
+    ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
 };

 macro_rules! register_builtin {
@@ -687,8 +687,8 @@ fn relative_file(
     path_str: &str,
     allow_recursion: bool,
 ) -> Result<EditionedFileId, ExpandError> {
-    let call_site =
-        call_id.as_macro_file().parent(db).original_file_respecting_includes(db).file_id();
+    let lookup = call_id.lookup(db);
+    let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
     let path = AnchoredPath { anchor: call_site, path: path_str };
     let res = db
         .resolve_path(path)
@@ -697,7 +697,7 @@ fn relative_file(
     if res == call_site && !allow_recursion {
         Err(ExpandError::other(format!("recursive inclusion of `{path_str}`")))
     } else {
-        Ok(EditionedFileId::new(res, Edition::CURRENT_FIXME))
+        Ok(EditionedFileId::new(res, db.crate_graph()[lookup.krate].edition))
     }
 }
@@ -65,9 +65,8 @@ pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(
                 i if i.starts_with('\'') => res.push(LIFETIME_IDENT),
                 _ if ident.is_raw.yes() => res.push(IDENT),
                 "gen" if !edition.at_least_2024() => res.push(IDENT),
-                "async" | "await" | "dyn" | "try" if !edition.at_least_2018() => {
-                    res.push(IDENT)
-                }
+                "dyn" if !edition.at_least_2018() => res.push_ident(DYN_KW),
+                "async" | "await" | "try" if !edition.at_least_2018() => res.push(IDENT),
                 text => match SyntaxKind::from_keyword(text) {
                     Some(kind) => res.push(kind),
                     None => {
@@ -2,6 +2,8 @@ use super::*;

 pub(super) const PATH_FIRST: TokenSet =
     TokenSet::new(&[IDENT, T![self], T![super], T![crate], T![Self], T![:], T![<]]);
+pub(super) const WEAK_DYN_PATH_FIRST: TokenSet =
+    TokenSet::new(&[IDENT, T![self], T![super], T![crate], T![Self]]);

 pub(super) fn is_path_start(p: &Parser<'_>) -> bool {
     is_use_path_start(p) || p.at(T![<]) || p.at(T![Self])
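WEAK_DYN_PATH_FIRST drops T![:] and T![<] from PATH_FIRST: in edition 2015, `dyn` is promoted to a keyword only when the next token could begin the path of a bound. A self-contained sketch of that lookahead rule (not rust-analyzer's API; `Tok` and `weak_dyn_starts_type` are made up for illustration):

    enum Tok { Ident, LowerSelf, Super, Crate, UpperSelf, Colon2 }

    // `dyn` begins a trait-object type only if the next token is in the
    // WEAK_DYN_PATH_FIRST set; `dyn::Path` sees `::` next, so its `dyn`
    // token stays an ordinary path segment.
    fn weak_dyn_starts_type(next: &Tok) -> bool {
        matches!(next, Tok::Ident | Tok::LowerSelf | Tok::Super | Tok::Crate | Tok::UpperSelf)
    }

    fn main() {
        assert!(weak_dyn_starts_type(&Tok::Ident));   // type A = dyn Iterator<...>;
        assert!(!weak_dyn_starts_type(&Tok::Colon2)); // type A = dyn::Path;
    }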
@@ -1,3 +1,5 @@
+use crate::grammar::paths::WEAK_DYN_PATH_FIRST;
+
 use super::*;

 pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
@@ -49,6 +51,13 @@ fn type_with_bounds_cond(p: &mut Parser<'_>, allow_bounds: bool) {
         T![dyn] => dyn_trait_type(p),
         // Some path types are not allowed to have bounds (no plus)
         T![<] => path_type_bounds(p, allow_bounds),
+        T![ident]
+            if !p.edition().at_least_2018()
+                && p.at_contextual_kw(T![dyn])
+                && WEAK_DYN_PATH_FIRST.contains(p.nth(1)) =>
+        {
+            dyn_trait_type_weak(p)
+        }
         _ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds),
         LIFETIME_IDENT if p.nth_at(1, T![+]) => bare_dyn_trait_type(p),
         _ => {
@@ -279,6 +288,18 @@ fn dyn_trait_type(p: &mut Parser<'_>) {
     m.complete(p, DYN_TRAIT_TYPE);
 }

+// test dyn_trait_type_weak 2015
+// type A = dyn Iterator<Item=Foo<'a>> + 'a;
+// type A = &dyn Iterator<Item=Foo<'a>> + 'a;
+// type A = dyn::Path;
+fn dyn_trait_type_weak(p: &mut Parser<'_>) {
+    assert!(p.at_contextual_kw(T![dyn]));
+    let m = p.start();
+    p.bump_remap(T![dyn]);
+    generic_params::bounds_without_colon(p);
+    m.complete(p, DYN_TRAIT_TYPE);
+}
+
 // test bare_dyn_types_with_leading_lifetime
 // type A = 'static + Trait;
 // type B = S<'static + Trait>;
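The key step above is `bump_remap`: the token was lexed as an identifier, but the parser consumes it and records it as T![dyn], so the finished tree contains a real DYN_KW. A toy, self-contained model of that idea (hypothetical `Kind`/`Events` types, not the real parser):

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum Kind { Ident, DynKw }

    struct Events { out: Vec<Kind> }

    impl Events {
        // Consume the current token but record it under a different kind,
        // which is the role bump_remap plays for the contextual `dyn` above.
        fn bump_remap(&mut self, remap: Kind) {
            self.out.push(remap);
        }
    }

    fn main() {
        let mut e = Events { out: Vec::new() };
        e.bump_remap(Kind::DynKw); // lexed as Kind::Ident, recorded as DynKw
        assert_eq!(e.out, vec![Kind::DynKw]);
    }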
@@ -27,14 +27,14 @@ pub(crate) struct Parser<'t> {
     pos: usize,
     events: Vec<Event>,
     steps: Cell<u32>,
-    _edition: Edition,
+    edition: Edition,
 }

 static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);

 impl<'t> Parser<'t> {
     pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
-        Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0), _edition: edition }
+        Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0), edition }
     }

     pub(crate) fn finish(self) -> Vec<Event> {
@@ -277,6 +277,10 @@ impl<'t> Parser<'t> {
     fn push_event(&mut self, event: Event) {
         self.events.push(event);
     }
+
+    pub(crate) fn edition(&self) -> Edition {
+        self.edition
+    }
 }

 /// See [`Parser::start`].
@@ -12,7 +12,7 @@
 use std::mem;

 use crate::{
-    LexedStr, Step,
+    Edition, LexedStr, Step,
     SyntaxKind::{self, *},
 };

@@ -25,7 +25,7 @@ pub enum StrStep<'a> {
 }

 impl LexedStr<'_> {
-    pub fn to_input(&self) -> crate::Input {
+    pub fn to_input(&self, edition: Edition) -> crate::Input {
         let _p = tracing::info_span!("LexedStr::to_input").entered();
         let mut res = crate::Input::default();
         let mut was_joint = false;
@@ -35,8 +35,11 @@ impl LexedStr<'_> {
                 was_joint = false
             } else if kind == SyntaxKind::IDENT {
                 let token_text = self.text(i);
-                let contextual_kw =
-                    SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
+                let contextual_kw = if !edition.at_least_2018() && token_text == "dyn" {
+                    SyntaxKind::DYN_KW
+                } else {
+                    SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT)
+                };
                 res.push_ident(contextual_kw);
             } else {
                 if was_joint {
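With the edition threaded into `to_input`, every lexer-to-parser call site gains the extra argument. Condensed from the `parse_text` change further down in this diff, the pipeline now reads:

    let lexed = parser::LexedStr::new(edition, text);
    let parser_input = lexed.to_input(edition);
    let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);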
@@ -70,7 +70,7 @@ fn parse_err() {

 fn parse(entry: TopEntryPoint, text: &str, edition: Edition) -> (String, bool) {
     let lexed = LexedStr::new(edition, text);
-    let input = lexed.to_input();
+    let input = lexed.to_input(edition);
     let output = entry.parse(&input, edition);

     let mut buf = String::new();
@@ -83,7 +83,7 @@ fn meta_item() {
 #[track_caller]
 fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
     let lexed = LexedStr::new(Edition::CURRENT, input);
-    let input = lexed.to_input();
+    let input = lexed.to_input(Edition::CURRENT);

     let mut n_tokens = 0;
     for step in entry.parse(&input, Edition::CURRENT).iter() {
@@ -195,6 +195,13 @@ mod ok {
         run_and_expect_no_errors("test_data/parser/inline/ok/dyn_trait_type.rs");
     }
     #[test]
+    fn dyn_trait_type_weak() {
+        run_and_expect_no_errors_with_edition(
+            "test_data/parser/inline/ok/dyn_trait_type_weak.rs",
+            crate::Edition::Edition2015,
+        );
+    }
+    #[test]
     fn effect_blocks() { run_and_expect_no_errors("test_data/parser/inline/ok/effect_blocks.rs"); }
     #[test]
     fn exclusive_range_pat() {
@@ -0,0 +1,113 @@
+SOURCE_FILE
+  TYPE_ALIAS
+    COMMENT "// 2015"
+    WHITESPACE "\n"
+    TYPE_KW "type"
+    WHITESPACE " "
+    NAME
+      IDENT "A"
+    WHITESPACE " "
+    EQ "="
+    WHITESPACE " "
+    DYN_TRAIT_TYPE
+      DYN_KW "dyn"
+      WHITESPACE " "
+      TYPE_BOUND_LIST
+        TYPE_BOUND
+          PATH_TYPE
+            PATH
+              PATH_SEGMENT
+                NAME_REF
+                  IDENT "Iterator"
+                GENERIC_ARG_LIST
+                  L_ANGLE "<"
+                  ASSOC_TYPE_ARG
+                    NAME_REF
+                      IDENT "Item"
+                    EQ "="
+                    PATH_TYPE
+                      PATH
+                        PATH_SEGMENT
+                          NAME_REF
+                            IDENT "Foo"
+                          GENERIC_ARG_LIST
+                            L_ANGLE "<"
+                            LIFETIME_ARG
+                              LIFETIME
+                                LIFETIME_IDENT "'a"
+                            R_ANGLE ">"
+                  R_ANGLE ">"
+        WHITESPACE " "
+        PLUS "+"
+        WHITESPACE " "
+        TYPE_BOUND
+          LIFETIME
+            LIFETIME_IDENT "'a"
+    SEMICOLON ";"
+  WHITESPACE "\n"
+  TYPE_ALIAS
+    TYPE_KW "type"
+    WHITESPACE " "
+    NAME
+      IDENT "A"
+    WHITESPACE " "
+    EQ "="
+    WHITESPACE " "
+    REF_TYPE
+      AMP "&"
+      DYN_TRAIT_TYPE
+        DYN_KW "dyn"
+        WHITESPACE " "
+        TYPE_BOUND_LIST
+          TYPE_BOUND
+            PATH_TYPE
+              PATH
+                PATH_SEGMENT
+                  NAME_REF
+                    IDENT "Iterator"
+                  GENERIC_ARG_LIST
+                    L_ANGLE "<"
+                    ASSOC_TYPE_ARG
+                      NAME_REF
+                        IDENT "Item"
+                      EQ "="
+                      PATH_TYPE
+                        PATH
+                          PATH_SEGMENT
+                            NAME_REF
+                              IDENT "Foo"
+                            GENERIC_ARG_LIST
+                              L_ANGLE "<"
+                              LIFETIME_ARG
+                                LIFETIME
+                                  LIFETIME_IDENT "'a"
+                              R_ANGLE ">"
+                    R_ANGLE ">"
+          WHITESPACE " "
+          PLUS "+"
+          WHITESPACE " "
+          TYPE_BOUND
+            LIFETIME
+              LIFETIME_IDENT "'a"
+    SEMICOLON ";"
+  WHITESPACE "\n"
+  TYPE_ALIAS
+    TYPE_KW "type"
+    WHITESPACE " "
+    NAME
+      IDENT "A"
+    WHITESPACE " "
+    EQ "="
+    WHITESPACE " "
+    PATH_TYPE
+      PATH
+        PATH
+          PATH_SEGMENT
+            NAME_REF
+              IDENT "dyn"
+        COLON2 "::"
+        PATH_SEGMENT
+          NAME_REF
+            IDENT "Path"
+    SEMICOLON ";"
+  WHITESPACE "\n"
@@ -0,0 +1,4 @@
+// 2015
+type A = dyn Iterator<Item=Foo<'a>> + 'a;
+type A = &dyn Iterator<Item=Foo<'a>> + 'a;
+type A = dyn::Path;
@@ -5,8 +5,8 @@ use project_model::{CargoConfig, RustLibSource};
 use rustc_hash::FxHashSet;

 use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
-use ide::{AnalysisHost, AssistResolveStrategy, DiagnosticsConfig, Severity};
-use ide_db::base_db::SourceDatabaseExt;
+use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity};
+use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase};
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};

 use crate::cli::flags;
@@ -74,7 +74,11 @@ impl flags::Diagnostics {
                     found_error = true;
                 }

-                println!("{diagnostic:?}");
+                let Diagnostic { code, message, range, severity, .. } = diagnostic;
+                let line_index = db.line_index(range.file_id);
+                let start = line_index.line_col(range.range.start());
+                let end = line_index.line_col(range.range.end());
+                println!("{severity:?} {code:?} from {start:?} to {end:?}: {message}");
             }

             visited_files.insert(file_id);
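Instead of dumping the raw Diagnostic, the command now resolves byte offsets to line/column positions through the file's line index. A toy version of that offset-to-position conversion (a standalone sketch, not the ide_db LineIndex API, which precomputes line starts and handles wide characters):

    // Map a byte offset to a zero-based (line, column) pair by scanning
    // the text before the offset.
    fn line_col(text: &str, offset: usize) -> (usize, usize) {
        let before = &text[..offset];
        let line = before.matches('\n').count();
        let col = before.rsplit('\n').next().unwrap_or("").len();
        (line, col)
    }

    fn main() {
        let src = "fn main() {\n    let x = 1;\n}\n";
        assert_eq!(line_col(src, 16), (1, 4)); // the `l` of `let` on line 1
    }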
@@ -12,7 +12,7 @@ pub(crate) use crate::parsing::reparsing::incremental_reparse;
 pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) {
     let _p = tracing::info_span!("parse_text").entered();
     let lexed = parser::LexedStr::new(edition, text);
-    let parser_input = lexed.to_input();
+    let parser_input = lexed.to_input(edition);
     let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
     (node, errors)
@@ -25,7 +25,7 @@ pub(crate) fn parse_text_at(
 ) -> (GreenNode, Vec<SyntaxError>) {
     let _p = tracing::info_span!("parse_text_at").entered();
     let lexed = parser::LexedStr::new(edition, text);
-    let parser_input = lexed.to_input();
+    let parser_input = lexed.to_input(edition);
     let parser_output = entry.parse(&parser_input, edition);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
     (node, errors)
@@ -92,7 +92,7 @@ fn reparse_block(
     let text = get_text_after_edit(node.clone().into(), edit);

     let lexed = parser::LexedStr::new(edition, text.as_str());
-    let parser_input = lexed.to_input();
+    let parser_input = lexed.to_input(edition);
     if !is_balanced(&lexed) {
         return None;
     }
@@ -6,7 +6,7 @@ use std::{
     time::{Instant, SystemTime, UNIX_EPOCH},
 };

-use anyhow::{bail, format_err};
+use anyhow::format_err;
 use xshell::{cmd, Shell};

 use crate::flags::{self, MeasurementType};
@@ -193,7 +193,7 @@ impl Metrics {
 impl Host {
     fn new(sh: &Shell) -> anyhow::Result<Host> {
         if cfg!(not(target_os = "linux")) {
-            bail!("can only collect metrics on Linux ");
+            return Ok(Host { os: "unknown".into(), cpu: "unknown".into(), mem: "unknown".into() });
         }

         let os = read_field(sh, "/etc/os-release", "PRETTY_NAME=")?.trim_matches('"').to_owned();