Auto merge of #17520 - Veykril:slim-proc-macro-api, r=Veykril

internal: Cleanup proc-macro-srv some more
bors 2024-06-30 15:12:50 +00:00
commit ea7fdada6a
53 changed files with 352 additions and 349 deletions

Cargo.lock generated
View file

@@ -1337,7 +1337,6 @@ dependencies = [
  "stdx",
  "text-size",
  "tracing",
- "triomphe",
  "tt",
 ]

View file

@@ -1,7 +1,5 @@
 //! base_db defines basic database traits. The concrete DB is defined by ide.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 mod change;
 mod input;

View file

@@ -1,7 +1,5 @@
 //! cfg defines conditional compiling options, `cfg` attribute parser and evaluator
-#![warn(rust_2018_idioms, unused_lifetimes)]
 mod cfg_expr;
 mod dnf;
 #[cfg(test)]

View file

@@ -6,8 +6,6 @@
 // addition to `cargo check`. Either split it into 3 crates (one for test, one for check
 // and one common utilities) or change its name and docs to reflect the current state.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 use std::{fmt, io, process::Command, time::Duration};
 use crossbeam_channel::{never, select, unbounded, Receiver, Sender};

View file

@@ -7,7 +7,6 @@
 //! Note that `hir_def` is a work in progress, so not all of the above is
 //! actually true.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 #[cfg(feature = "in-rust-tree")]

View file

@@ -4,7 +4,6 @@
 //! tree originates not from the text of some `FileId`, but from some macro
 //! expansion.
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
-#![warn(rust_2018_idioms, unused_lifetimes)]
 pub mod attrs;
 pub mod builtin_attr_macro;

View file

@@ -1,6 +1,6 @@
 //! The type system. We currently use this to infer types for completion, hover
 //! information and various assists.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 #[cfg(feature = "in-rust-tree")]

View file

@@ -17,7 +17,6 @@
 //! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
 //! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 #![recursion_limit = "512"]

View file

@@ -58,8 +58,6 @@
 //! See also this post:
 //! <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>
-#![warn(rust_2018_idioms, unused_lifetimes)]
 mod assist_config;
 mod assist_context;
 #[cfg(test)]

View file

@@ -1,7 +1,5 @@
 //! `completions` crate provides utilities for generating completions of user input.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 mod completions;
 mod config;
 mod context;

View file

@@ -2,8 +2,6 @@
 //!
 //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 mod apply_change;
 pub mod active_parameter;

View file

@@ -23,8 +23,6 @@
 //! There are also a couple of ad-hoc diagnostics implemented directly here, we
 //! don't yet have a great pattern for how to do them properly.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 mod handlers {
     pub(crate) mod break_outside_of_loop;
     pub(crate) mod expected_function;

View file

@@ -3,8 +3,6 @@
 //! Allows searching the AST for code that matches one or more patterns and then replacing that code
 //! based on a template.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 // Feature: Structural Search and Replace
 //
 // Search and replace with named wildcards that will match any expression, type, path, pattern or item.

View file

@@ -8,7 +8,7 @@
 //! in this crate.
 // For proving that RootDatabase is RefUnwindSafe.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 #![recursion_limit = "128"]

View file

@@ -1,7 +1,5 @@
 //! limit defines a struct to enforce limits.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #[cfg(feature = "tracking")]
 use std::sync::atomic::AtomicUsize;

View file

@@ -6,8 +6,6 @@
 //! The tests for this functionality live in another crate:
 //! `hir_def::macro_expansion_tests::mbe`.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 mod expander;
 mod parser;
 mod syntax_bridge;

View file

@@ -17,7 +17,6 @@
 //!
 //! [`Parser`]: crate::parser::Parser
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![allow(rustdoc::private_intra_doc_links)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

View file

@@ -1,8 +1,6 @@
 //! Thin wrappers around `std::path`/`camino::path`, distinguishing between absolute and
 //! relative paths.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 use std::{
     borrow::Borrow,
     ffi::OsStr,

View file

@@ -15,7 +15,6 @@ doctest = false
 serde.workspace = true
 serde_json = { workspace = true, features = ["unbounded_depth"] }
 tracing.workspace = true
-triomphe.workspace = true
 rustc-hash.workspace = true
 indexmap.workspace = true

View file

@@ -0,0 +1,35 @@
+//! Protocol functions for json.
+use std::io::{self, BufRead, Write};
+
+pub fn read_json<'a>(
+    inp: &mut impl BufRead,
+    buf: &'a mut String,
+) -> io::Result<Option<&'a String>> {
+    loop {
+        buf.clear();
+
+        inp.read_line(buf)?;
+        buf.pop(); // Remove trailing '\n'
+
+        if buf.is_empty() {
+            return Ok(None);
+        }
+
+        // Some ill behaved macro try to use stdout for debugging
+        // We ignore it here
+        if !buf.starts_with('{') {
+            tracing::error!("proc-macro tried to print : {}", buf);
+            continue;
+        }
+
+        return Ok(Some(buf));
+    }
+}
+
+pub fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
+    tracing::debug!("> {}", msg);
+    out.write_all(msg.as_bytes())?;
+    out.write_all(b"\n")?;
+    out.flush()?;
+    Ok(())
+}
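Note (not part of the commit): a minimal sketch of how these two helpers pair up, assuming the proc-macro-api crate as a dependency and an in-memory buffer in place of the child process' stdio; the payload string is made up for illustration.

use proc_macro_api::json::{read_json, write_json};
use std::io::Cursor;

fn round_trip() -> std::io::Result<()> {
    // write_json appends a newline and flushes; read_json skips non-JSON chatter.
    let mut wire = Vec::new();
    write_json(&mut wire, r#"{"ApiVersionCheck":{}}"#)?;

    let mut reader = Cursor::new(wire);
    let mut buf = String::new();
    if let Some(line) = read_json(&mut reader, &mut buf)? {
        assert!(line.starts_with('{'));
    }
    Ok(())
}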

View file

@@ -5,27 +5,23 @@
 //! is used to provide basic infrastructure for communication between two
 //! processes: Client (RA itself), Server (the external program)
-#![warn(rust_2018_idioms, unused_lifetimes)]
+pub mod json;
 pub mod msg;
 mod process;

 use base_db::Env;
-use indexmap::IndexSet;
 use paths::{AbsPath, AbsPathBuf};
-use rustc_hash::FxHashMap;
 use span::Span;
-use std::{
-    fmt, io,
-    sync::{Arc, Mutex},
-};
+use std::{fmt, io, sync::Arc};
+use tt::SmolStr;

 use serde::{Deserialize, Serialize};

 use crate::{
     msg::{
         deserialize_span_data_index_map, flat::serialize_span_data_index_map, ExpandMacro,
-        ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT,
+        ExpnGlobals, FlatTree, PanicMessage, SpanDataIndexMap, HAS_GLOBAL_SPANS,
+        RUST_ANALYZER_SPAN_SUPPORT,
     },
     process::ProcMacroProcessSrv,
 };

@@ -48,9 +44,7 @@ pub struct ProcMacroServer {
     ///
     /// That means that concurrent salsa requests may block each other when expanding proc macros,
     /// which is unfortunate, but simple and good enough for the time being.
-    ///
-    /// Therefore, we just wrap the `ProcMacroProcessSrv` in a mutex here.
-    process: Arc<Mutex<ProcMacroProcessSrv>>,
+    process: Arc<ProcMacroProcessSrv>,
     path: AbsPathBuf,
 }

@@ -70,9 +64,9 @@ impl MacroDylib {
 /// we share a single expander process for all macros.
 #[derive(Debug, Clone)]
 pub struct ProcMacro {
-    process: Arc<Mutex<ProcMacroProcessSrv>>,
-    dylib_path: AbsPathBuf,
-    name: String,
+    process: Arc<ProcMacroProcessSrv>,
+    dylib_path: Arc<AbsPathBuf>,
+    name: SmolStr,
     kind: ProcMacroKind,
 }

@@ -81,7 +75,7 @@ impl PartialEq for ProcMacro {
     fn eq(&self, other: &Self) -> bool {
         self.name == other.name
             && self.kind == other.kind
-            && self.dylib_path == other.dylib_path
+            && Arc::ptr_eq(&self.dylib_path, &other.dylib_path)
             && Arc::ptr_eq(&self.process, &other.process)
     }
 }

@@ -89,7 +83,6 @@ impl PartialEq for ProcMacro {
 #[derive(Clone, Debug)]
 pub struct ServerError {
     pub message: String,
-    // io::Error isn't Clone for some reason
     pub io: Option<Arc<io::Error>>,
 }

@@ -104,21 +97,15 @@ impl fmt::Display for ServerError {
     }
 }

-pub struct MacroPanic {
-    pub message: String,
-}
-
 impl ProcMacroServer {
     /// Spawns an external process as the proc macro server and returns a client connected to it.
     pub fn spawn(
         process_path: &AbsPath,
-        env: &FxHashMap<String, String>,
+        env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>
+            + Clone,
     ) -> io::Result<ProcMacroServer> {
         let process = ProcMacroProcessSrv::run(process_path, env)?;
-        Ok(ProcMacroServer {
-            process: Arc::new(Mutex::new(process)),
-            path: process_path.to_owned(),
-        })
+        Ok(ProcMacroServer { process: Arc::new(process), path: process_path.to_owned() })
     }

     pub fn path(&self) -> &AbsPath {

@@ -127,17 +114,17 @@ impl ProcMacroServer {
     pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
         let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
-        let macros =
-            self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?;
+        let macros = self.process.find_proc_macros(&dylib.path)?;

+        let dylib_path = Arc::new(dylib.path);
         match macros {
             Ok(macros) => Ok(macros
                 .into_iter()
                 .map(|(name, kind)| ProcMacro {
                     process: self.process.clone(),
-                    name,
+                    name: name.into(),
                     kind,
-                    dylib_path: dylib.path.clone(),
+                    dylib_path: dylib_path.clone(),
                 })
                 .collect()),
             Err(message) => Err(ServerError { message, io: None }),

@@ -163,20 +150,19 @@ impl ProcMacro {
         call_site: Span,
         mixed_site: Span,
     ) -> Result<Result<tt::Subtree<Span>, PanicMessage>, ServerError> {
-        let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version();
+        let version = self.process.version();
         let current_dir = env.get("CARGO_MANIFEST_DIR");

-        let mut span_data_table = IndexSet::default();
+        let mut span_data_table = SpanDataIndexMap::default();
         let def_site = span_data_table.insert_full(def_site).0;
         let call_site = span_data_table.insert_full(call_site).0;
         let mixed_site = span_data_table.insert_full(mixed_site).0;
         let task = ExpandMacro {
+            data: msg::ExpandMacroData {
                 macro_body: FlatTree::new(subtree, version, &mut span_data_table),
                 macro_name: self.name.to_string(),
-            attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
-            lib: self.dylib_path.to_path_buf().into(),
-            env: env.into(),
-            current_dir,
+                attributes: attr
+                    .map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
                 has_global_spans: ExpnGlobals {
                     serialize: version >= HAS_GLOBAL_SPANS,
                     def_site,

@@ -188,13 +174,13 @@ impl ProcMacro {
                 } else {
                     Vec::new()
                 },
+            },
+            lib: self.dylib_path.to_path_buf().into(),
+            env: env.into(),
+            current_dir,
         };

-        let response = self
-            .process
-            .lock()
-            .unwrap_or_else(|e| e.into_inner())
-            .send_task(msg::Request::ExpandMacro(Box::new(task)))?;
+        let response = self.process.send_task(msg::Request::ExpandMacro(Box::new(task)))?;
         match response {
             msg::Response::ExpandMacro(it) => {

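A side note on the client API above (illustrative, not from the commit): ProcMacroServer::spawn now accepts any cloneable iterator of OS-string pairs instead of a &FxHashMap<String, String>, so a fixed slice works directly. The function name and the extra env var below are assumptions.

fn connect(server_path: &paths::AbsPath) -> std::io::Result<proc_macro_api::ProcMacroServer> {
    // Any Clone iterator of (AsRef<OsStr>, AsRef<OsStr>) pairs is accepted for `env`.
    let extra_env = [("RUST_BACKTRACE", "short")];
    proc_macro_api::ProcMacroServer::spawn(server_path, extra_env)
}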
View file

@@ -72,6 +72,16 @@ pub struct PanicMessage(pub String);
 #[derive(Debug, Serialize, Deserialize)]
 pub struct ExpandMacro {
+    pub lib: Utf8PathBuf,
+    /// Environment variables to set during macro expansion.
+    pub env: Vec<(String, String)>,
+    pub current_dir: Option<String>,
+    #[serde(flatten)]
+    pub data: ExpandMacroData,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ExpandMacroData {
     /// Argument of macro call.
     ///
     /// In custom derive this will be a struct or enum; in attribute-like macro - underlying

@@ -86,13 +96,6 @@ pub struct ExpandMacro {
     /// Possible attributes for the attribute-like macros.
     pub attributes: Option<FlatTree>,
-    pub lib: Utf8PathBuf,
-    /// Environment variables to set during macro expansion.
-    pub env: Vec<(String, String)>,
-    pub current_dir: Option<String>,
     /// marker for serde skip stuff
     #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
     #[serde(default)]

@@ -119,8 +122,12 @@ impl ExpnGlobals {
 }

 pub trait Message: Serialize + DeserializeOwned {
-    fn read(inp: &mut impl BufRead, buf: &mut String) -> io::Result<Option<Self>> {
-        Ok(match read_json(inp, buf)? {
+    fn read<R: BufRead>(
+        from_proto: ProtocolRead<R>,
+        inp: &mut R,
+        buf: &mut String,
+    ) -> io::Result<Option<Self>> {
+        Ok(match from_proto(inp, buf)? {
             None => None,
             Some(text) => {
                 let mut deserializer = serde_json::Deserializer::from_str(text);

@@ -131,44 +138,20 @@ pub trait Message: Serialize + DeserializeOwned {
             }
         })
     }
-    fn write(self, out: &mut impl Write) -> io::Result<()> {
+    fn write<W: Write>(self, to_proto: ProtocolWrite<W>, out: &mut W) -> io::Result<()> {
         let text = serde_json::to_string(&self)?;
-        write_json(out, &text)
+        to_proto(out, &text)
     }
 }

 impl Message for Request {}
 impl Message for Response {}

-fn read_json<'a>(inp: &mut impl BufRead, buf: &'a mut String) -> io::Result<Option<&'a String>> {
-    loop {
-        buf.clear();
-        inp.read_line(buf)?;
-        buf.pop(); // Remove trailing '\n'
-        if buf.is_empty() {
-            return Ok(None);
-        }
-        // Some ill behaved macro try to use stdout for debugging
-        // We ignore it here
-        if !buf.starts_with('{') {
-            tracing::error!("proc-macro tried to print : {}", buf);
-            continue;
-        }
-        return Ok(Some(buf));
-    }
-}
-
-fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
-    tracing::debug!("> {}", msg);
-    out.write_all(msg.as_bytes())?;
-    out.write_all(b"\n")?;
-    out.flush()?;
-    Ok(())
-}
+#[allow(type_alias_bounds)]
+type ProtocolRead<R: BufRead> =
+    for<'i, 'buf> fn(inp: &'i mut R, buf: &'buf mut String) -> io::Result<Option<&'buf String>>;
+#[allow(type_alias_bounds)]
+type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str) -> io::Result<()>;

 #[cfg(test)]
 mod tests {

@@ -268,12 +251,10 @@ mod tests {
         let tt = fixture_token_tree();
         let mut span_data_table = Default::default();
         let task = ExpandMacro {
+            data: ExpandMacroData {
                 macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
                 macro_name: Default::default(),
                 attributes: None,
-            lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(),
-            env: Default::default(),
-            current_dir: Default::default(),
                 has_global_spans: ExpnGlobals {
                     serialize: true,
                     def_site: 0,

@@ -281,12 +262,19 @@ mod tests {
                     mixed_site: 0,
                 },
                 span_data_table: Vec::new(),
+            },
+            lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(),
+            env: Default::default(),
+            current_dir: Default::default(),
         };

         let json = serde_json::to_string(&task).unwrap();
         // println!("{}", json);
         let back: ExpandMacro = serde_json::from_str(&json).unwrap();
-        assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
+        assert_eq!(
+            tt,
+            back.data.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)
+        );
     }
 }
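For orientation, a sketch (not an excerpt from the commit, but using the API as shown in this diff): Message::read/Message::write are now generic over the transport, and the json helpers are passed in as plain fn pointers matching ProtocolRead/ProtocolWrite.

use proc_macro_api::json::{read_json, write_json};
use proc_macro_api::msg::{self, Message};
use std::io;

fn answer_version_check(buf: &mut String) -> io::Result<()> {
    // The same trait methods work with any reader/writer pair plus protocol functions.
    if let Some(_req) = msg::Request::read(read_json, &mut io::stdin().lock(), buf)? {
        let resp = msg::Response::ApiVersionCheck(msg::CURRENT_API_VERSION);
        resp.write(write_json, &mut io::stdout().lock())?;
    }
    Ok(())
}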

View file

@@ -37,7 +37,6 @@
 use std::collections::VecDeque;

-use indexmap::IndexSet;
 use la_arena::RawIdx;
 use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};

@@ -46,7 +45,8 @@ use text_size::TextRange;
 use crate::msg::ENCODE_CLOSE_SPAN_VERSION;

-pub type SpanDataIndexMap = IndexSet<Span>;
+pub type SpanDataIndexMap =
+    indexmap::IndexSet<Span, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;

 pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec<u32> {
     map.iter()

@@ -328,7 +328,7 @@ impl InternableSpan for TokenId {
     }
 }
 impl InternableSpan for Span {
-    type Table = IndexSet<Span>;
+    type Table = SpanDataIndexMap;
     fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId {
         TokenId(table.insert_full(span).0 as u32)
     }
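The type alias above pins the span-interning table to an insertion-ordered set with FxHasher, so a span can be shipped over the wire as its stable u32 index and resolved back on the other side. A self-contained sketch of that interning idea, with a toy Span type (assumes the indexmap and rustc-hash crates):

use std::hash::BuildHasherDefault;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Span(u32); // stand-in for the real `span::Span`

type SpanTable = indexmap::IndexSet<Span, BuildHasherDefault<rustc_hash::FxHasher>>;

fn intern(table: &mut SpanTable, span: Span) -> u32 {
    // insert_full returns the stable index of the (possibly pre-existing) entry.
    table.insert_full(span).0 as u32
}

fn resolve(table: &SpanTable, token_id: u32) -> Option<Span> {
    table.get_index(token_id as usize).copied()
}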

View file

@@ -3,43 +3,48 @@
 use std::{
     io::{self, BufRead, BufReader, Read, Write},
     process::{Child, ChildStdin, ChildStdout, Command, Stdio},
-    sync::Arc,
+    sync::{Arc, Mutex},
 };

 use paths::AbsPath;
-use rustc_hash::FxHashMap;
 use stdx::JodChild;

 use crate::{
+    json::{read_json, write_json},
     msg::{Message, Request, Response, SpanMode, CURRENT_API_VERSION, RUST_ANALYZER_SPAN_SUPPORT},
     ProcMacroKind, ServerError,
 };

 #[derive(Debug)]
 pub(crate) struct ProcMacroProcessSrv {
+    /// The state of the proc-macro server process, the protocol is currently strictly sequential
+    /// hence the lock on the state.
+    state: Mutex<ProcessSrvState>,
+    version: u32,
+    mode: SpanMode,
+}
+
+#[derive(Debug)]
+struct ProcessSrvState {
     process: Process,
     stdin: ChildStdin,
     stdout: BufReader<ChildStdout>,
     /// Populated when the server exits.
     server_exited: Option<ServerError>,
-    version: u32,
-    mode: SpanMode,
 }

 impl ProcMacroProcessSrv {
     pub(crate) fn run(
         process_path: &AbsPath,
-        env: &FxHashMap<String, String>,
+        env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>
+            + Clone,
     ) -> io::Result<ProcMacroProcessSrv> {
         let create_srv = |null_stderr| {
-            let mut process = Process::run(process_path, env, null_stderr)?;
+            let mut process = Process::run(process_path, env.clone(), null_stderr)?;
             let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");

             io::Result::Ok(ProcMacroProcessSrv {
-                process,
-                stdin,
-                stdout,
-                server_exited: None,
+                state: Mutex::new(ProcessSrvState { process, stdin, stdout, server_exited: None }),
                 version: 0,
                 mode: SpanMode::Id,
             })

@@ -76,7 +81,7 @@ impl ProcMacroProcessSrv {
         self.version
     }

-    pub(crate) fn version_check(&mut self) -> Result<u32, ServerError> {
+    fn version_check(&self) -> Result<u32, ServerError> {
         let request = Request::ApiVersionCheck {};
         let response = self.send_task(request)?;

@@ -86,7 +91,7 @@ impl ProcMacroProcessSrv {
         }
     }

-    fn enable_rust_analyzer_spans(&mut self) -> Result<SpanMode, ServerError> {
+    fn enable_rust_analyzer_spans(&self) -> Result<SpanMode, ServerError> {
         let request = Request::SetConfig(crate::msg::ServerConfig {
             span_mode: crate::msg::SpanMode::RustAnalyzer,
         });

@@ -99,7 +104,7 @@ impl ProcMacroProcessSrv {
     }

     pub(crate) fn find_proc_macros(
-        &mut self,
+        &self,
         dylib_path: &AbsPath,
     ) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
         let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() };

@@ -112,20 +117,21 @@ impl ProcMacroProcessSrv {
         }
     }

-    pub(crate) fn send_task(&mut self, req: Request) -> Result<Response, ServerError> {
-        if let Some(server_error) = &self.server_exited {
+    pub(crate) fn send_task(&self, req: Request) -> Result<Response, ServerError> {
+        let state = &mut *self.state.lock().unwrap();
+        if let Some(server_error) = &state.server_exited {
             return Err(server_error.clone());
         }

         let mut buf = String::new();
-        send_request(&mut self.stdin, &mut self.stdout, req, &mut buf).map_err(|e| {
+        send_request(&mut state.stdin, &mut state.stdout, req, &mut buf).map_err(|e| {
             if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) {
-                match self.process.child.try_wait() {
+                match state.process.child.try_wait() {
                     Ok(None) => e,
                     Ok(Some(status)) => {
                         let mut msg = String::new();
                         if !status.success() {
-                            if let Some(stderr) = self.process.child.stderr.as_mut() {
+                            if let Some(stderr) = state.process.child.stderr.as_mut() {
                                 _ = stderr.read_to_string(&mut msg);
                             }
                         }

@@ -133,7 +139,7 @@ impl ProcMacroProcessSrv {
                             message: format!("server exited with {status}: {msg}"),
                             io: None,
                         };
-                        self.server_exited = Some(server_error.clone());
+                        state.server_exited = Some(server_error.clone());
                         server_error
                     }
                     Err(_) => e,

@@ -153,7 +159,7 @@ struct Process {
 impl Process {
     fn run(
         path: &AbsPath,
-        env: &FxHashMap<String, String>,
+        env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>,
         null_stderr: bool,
     ) -> io::Result<Process> {
         let child = JodChild(mk_child(path, env, null_stderr)?);

@@ -171,7 +177,7 @@ impl Process {
 fn mk_child(
     path: &AbsPath,
-    env: &FxHashMap<String, String>,
+    env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>,
     null_stderr: bool,
 ) -> io::Result<Child> {
     let mut cmd = Command::new(path);

@@ -196,11 +202,11 @@ fn send_request(
     req: Request,
     buf: &mut String,
 ) -> Result<Response, ServerError> {
-    req.write(&mut writer).map_err(|err| ServerError {
+    req.write(write_json, &mut writer).map_err(|err| ServerError {
         message: "failed to write request".into(),
         io: Some(Arc::new(err)),
     })?;
-    let res = Response::read(&mut reader, buf).map_err(|err| ServerError {
+    let res = Response::read(read_json, &mut reader, buf).map_err(|err| ServerError {
         message: "failed to read response".into(),
         io: Some(Arc::new(err)),
     })?;
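The refactor above moves the lock inside the server handle: per-request state sits behind a Mutex, while version/mode stay immutable after startup, so callers can share the handle through a plain Arc and call &self methods. A standalone sketch of that pattern with toy types (all names here are made up):

use std::sync::{Arc, Mutex};

struct Connection { requests_sent: u32 }

struct Client {
    state: Mutex<Connection>, // locked only for the duration of one request
    version: u32,             // fixed after construction, readable without the lock
}

impl Client {
    fn version(&self) -> u32 { self.version }

    fn send(&self) -> u32 {
        let state = &mut *self.state.lock().unwrap();
        state.requests_sent += 1;
        state.requests_sent
    }
}

fn main() {
    let client = Arc::new(Client { state: Mutex::new(Connection { requests_sent: 0 }), version: 4 });
    let shared = Arc::clone(&client);
    assert_eq!(shared.send(), 1);
    assert_eq!(client.version(), 4);
}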

View file

@@ -6,6 +6,8 @@
 #[cfg(feature = "in-rust-tree")]
 extern crate rustc_driver as _;

+use proc_macro_api::json::{read_json, write_json};
 use std::io;

 fn main() -> std::io::Result<()> {

@@ -26,19 +28,49 @@ fn main() -> std::io::Result<()> {
 #[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))]
 fn run() -> io::Result<()> {
-    eprintln!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled");
-    std::process::exit(70);
+    let err = "proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function";
+    eprintln!("{err}");
+
+    use proc_macro_api::msg::{self, Message};
+    let read_request =
+        |buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf);
+    let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock());
+
+    let mut buf = String::new();
+    while let Some(req) = read_request(&mut buf)? {
+        let res = match req {
+            msg::Request::ListMacros { .. } => msg::Response::ListMacros(Err(err.to_owned())),
+            msg::Request::ExpandMacro(_) => {
+                msg::Response::ExpandMacro(Err(msg::PanicMessage(err.to_owned())))
+            }
+            msg::Request::ApiVersionCheck {} => {
+                msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION)
+            }
+            msg::Request::SetConfig(_) => {
+                msg::Response::SetConfig(proc_macro_api::msg::ServerConfig {
+                    span_mode: msg::SpanMode::Id,
+                })
+            }
+        };
+        write_response(res)?
+    }
+    Ok(())
 }

 #[cfg(any(feature = "sysroot-abi", rust_analyzer))]
 fn run() -> io::Result<()> {
     use proc_macro_api::msg::{self, Message};
+    use proc_macro_srv::EnvSnapshot;

-    let read_request = |buf: &mut String| msg::Request::read(&mut io::stdin().lock(), buf);
+    let read_request =
+        |buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf);
-    let write_response = |msg: msg::Response| msg.write(&mut io::stdout().lock());
+    let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock());

-    let mut srv = proc_macro_srv::ProcMacroSrv::default();
+    let env = EnvSnapshot::new();
+    let mut srv = proc_macro_srv::ProcMacroSrv::new(&env);
     let mut buf = String::new();

     while let Some(req) = read_request(&mut buf)? {

View file

@@ -1,27 +1,15 @@
 //! Determine rustc version `proc-macro-srv` (and thus the sysroot ABI) is
 //! build with and make it accessible at runtime for ABI selection.
-use std::{env, fs::File, io::Write, path::PathBuf, process::Command};
+use std::{env, process::Command};

 fn main() {
-    println!("cargo:rustc-check-cfg=cfg(rust_analyzer)");
-    let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
-    path.push("rustc_version.rs");
-    let mut f = File::create(&path).unwrap();
+    println!("cargo::rustc-check-cfg=cfg(rust_analyzer)");

     let rustc = env::var("RUSTC").expect("proc-macro-srv's build script expects RUSTC to be set");
     let output = Command::new(rustc).arg("--version").output().expect("rustc --version must run");
     let version_string = std::str::from_utf8(&output.stdout[..])
         .expect("rustc --version output must be UTF-8")
         .trim();
+    println!("cargo::rustc-env=RUSTC_VERSION={}", version_string);
-    write!(
-        f,
-        "
-    #[allow(dead_code)]
-    pub(crate) const RUSTC_VERSION_STRING: &str = {version_string:?};
-    "
-    )
-    .unwrap();
 }
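With the build script now emitting cargo::rustc-env=RUSTC_VERSION=..., the generated include file is gone; the consuming side (shown in the proc-macro-srv lib.rs hunk further down in this diff) reads the value back at compile time:

// Compile-time read of the env var set by the build script above.
pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION");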

View file

@@ -8,7 +8,7 @@
 //! 1.58) and future ABIs (stage1, nightly)
 use std::{
-    env, fs,
+    env,
     path::{Path, PathBuf},
     process::Command,
 };

@@ -30,8 +30,7 @@ fn main() {
     if !has_features {
         println!("proc-macro-test testing only works on nightly toolchains");
-        let info_path = out_dir.join("proc_macro_test_location.txt");
-        fs::File::create(info_path).unwrap();
+        println!("cargo::rustc-env=PROC_MACRO_TEST_LOCATION=\"\"");
         return;
     }

@@ -121,6 +120,5 @@ fn main() {
     // This file is under `target_dir` and is already under `OUT_DIR`.
     let artifact_path = artifact_path.expect("no dylib for proc-macro-test-impl found");

-    let info_path = out_dir.join("proc_macro_test_location.txt");
-    fs::write(info_path, artifact_path.to_str().unwrap()).unwrap();
+    println!("cargo::rustc-env=PROC_MACRO_TEST_LOCATION={}", artifact_path.display());
 }

View file

@@ -1,6 +1,6 @@
 //! Exports a few trivial procedural macros for testing.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![feature(proc_macro_span, proc_macro_def_site)]
 #![allow(clippy::all)]

View file

@@ -1,6 +1,3 @@
 //! Exports a few trivial procedural macros for testing.
-#![warn(rust_2018_idioms, unused_lifetimes)]
-pub static PROC_MACRO_TEST_LOCATION: &str =
-    include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt"));
+pub static PROC_MACRO_TEST_LOCATION: &str = env!("PROC_MACRO_TEST_LOCATION");

View file

@@ -2,13 +2,13 @@
 mod version;

-use proc_macro::bridge;
 use std::{fmt, fs::File, io};

 use libloading::Library;
 use memmap2::Mmap;
 use object::Object;
 use paths::{AbsPath, Utf8Path, Utf8PathBuf};
+use proc_macro::bridge;
 use proc_macro_api::ProcMacroKind;

 use crate::ProcMacroSrvSpan;

@@ -133,24 +133,33 @@ impl ProcMacroLibraryLibloading {
     }
 }

-pub struct Expander {
+pub(crate) struct Expander {
     inner: ProcMacroLibraryLibloading,
+    path: Utf8PathBuf,
+}
+
+impl Drop for Expander {
+    fn drop(&mut self) {
+        #[cfg(windows)]
+        std::fs::remove_file(&self.path).ok();
+        _ = self.path;
+    }
 }

 impl Expander {
-    pub fn new(lib: &Utf8Path) -> Result<Expander, LoadProcMacroDylibError> {
+    pub(crate) fn new(lib: &Utf8Path) -> Result<Expander, LoadProcMacroDylibError> {
         // Some libraries for dynamic loading require canonicalized path even when it is
         // already absolute
         let lib = lib.canonicalize_utf8()?;
-        let lib = ensure_file_with_lock_free_access(&lib)?;
-        let library = ProcMacroLibraryLibloading::open(lib.as_ref())?;
-        Ok(Expander { inner: library })
+        let path = ensure_file_with_lock_free_access(&lib)?;
+        let library = ProcMacroLibraryLibloading::open(path.as_ref())?;
+        Ok(Expander { inner: library, path })
     }

-    pub fn expand<S: ProcMacroSrvSpan>(
+    pub(crate) fn expand<S: ProcMacroSrvSpan>(
         &self,
         macro_name: &str,
         macro_body: tt::Subtree<S>,

@@ -169,7 +178,7 @@ impl Expander {
         result.map_err(|e| e.into_string().unwrap_or_default())
     }

-    pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+    pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
         self.inner.proc_macros.list_macros()
     }
 }

@@ -198,7 +207,7 @@ fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result<Utf8PathBuf>
     unique_name.push_str(file_name);
     to.push(unique_name);
-    std::fs::copy(path, &to).unwrap();
+    std::fs::copy(path, &to)?;
     Ok(to)
 }

View file

@@ -13,7 +13,6 @@
 #![cfg(any(feature = "sysroot-abi", rust_analyzer))]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![allow(unreachable_pub, internal_features)]

 extern crate proc_macro;

@@ -27,13 +26,15 @@ extern crate rustc_lexer;
 mod dylib;
 mod proc_macros;
-mod server;
+mod server_impl;

 use std::{
     collections::{hash_map::Entry, HashMap},
     env,
     ffi::OsString,
-    fs, thread,
+    fs,
+    path::{Path, PathBuf},
+    thread,
     time::SystemTime,
 };

@@ -47,46 +48,25 @@ use proc_macro_api::{
 };
 use span::Span;

-use crate::server::TokenStream;
+use crate::server_impl::TokenStream;

-// see `build.rs`
-include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
+pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION");

-trait ProcMacroSrvSpan: Copy {
-    type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>;
-    fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
-}
-impl ProcMacroSrvSpan for TokenId {
-    type Server = server::token_id::TokenIdServer;
-    fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
-        Self::Server { interner: &server::SYMBOL_INTERNER, call_site, def_site, mixed_site }
-    }
-}
-impl ProcMacroSrvSpan for Span {
-    type Server = server::rust_analyzer_span::RaSpanServer;
-    fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
-        Self::Server {
-            interner: &server::SYMBOL_INTERNER,
-            call_site,
-            def_site,
-            mixed_site,
-            tracked_env_vars: Default::default(),
-            tracked_paths: Default::default(),
-        }
-    }
-}
-
-#[derive(Default)]
-pub struct ProcMacroSrv {
+pub struct ProcMacroSrv<'env> {
     expanders: HashMap<(Utf8PathBuf, SystemTime), dylib::Expander>,
     span_mode: SpanMode,
+    env: &'env EnvSnapshot,
+}
+
+impl<'env> ProcMacroSrv<'env> {
+    pub fn new(env: &'env EnvSnapshot) -> Self {
+        Self { expanders: Default::default(), span_mode: Default::default(), env }
+    }
 }

 const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;

-impl ProcMacroSrv {
+impl<'env> ProcMacroSrv<'env> {
     pub fn set_span_mode(&mut self, span_mode: SpanMode) {
         self.span_mode = span_mode;
     }
@@ -97,52 +77,24 @@ impl ProcMacroSrv {
     pub fn expand(
         &mut self,
-        task: msg::ExpandMacro,
+        msg::ExpandMacro { lib, env, current_dir, data }: msg::ExpandMacro,
     ) -> Result<(msg::FlatTree, Vec<u32>), msg::PanicMessage> {
         let span_mode = self.span_mode;
-        let expander = self.expander(task.lib.as_ref()).map_err(|err| {
+        let snapped_env = self.env;
+        let expander = self.expander(lib.as_ref()).map_err(|err| {
             debug_assert!(false, "should list macros before asking to expand");
             msg::PanicMessage(format!("failed to load macro: {err}"))
         })?;

-        let prev_env = EnvSnapshot::new();
-        for (k, v) in &task.env {
-            env::set_var(k, v);
-        }
-        let prev_working_dir = match &task.current_dir {
-            Some(dir) => {
-                let prev_working_dir = std::env::current_dir().ok();
-                if let Err(err) = std::env::set_current_dir(dir) {
-                    eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}")
-                }
-                prev_working_dir
-            }
-            None => None,
-        };
-        let ExpnGlobals { def_site, call_site, mixed_site, .. } = task.has_global_spans;
+        let prev_env = EnvChange::apply(snapped_env, env, current_dir.as_ref().map(<_>::as_ref));

         let result = match span_mode {
-            SpanMode::Id => {
-                expand_id(task, expander, def_site, call_site, mixed_site).map(|it| (it, vec![]))
-            }
-            SpanMode::RustAnalyzer => {
-                expand_ra_span(task, expander, def_site, call_site, mixed_site)
-            }
+            SpanMode::Id => expand_id(data, expander).map(|it| (it, vec![])),
+            SpanMode::RustAnalyzer => expand_ra_span(data, expander),
         };

         prev_env.rollback();

-        if let Some(dir) = prev_working_dir {
-            if let Err(err) = std::env::set_current_dir(&dir) {
-                eprintln!(
-                    "Failed to set the current working dir to {}. Error: {:?}",
-                    dir.display(),
-                    err
-                )
-            }
-        }
-
         result.map_err(msg::PanicMessage)
     }
@@ -169,33 +121,55 @@ impl ProcMacroSrv {
     }
 }

+trait ProcMacroSrvSpan: Copy {
+    type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>;
+    fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
+}
+
+impl ProcMacroSrvSpan for TokenId {
+    type Server = server_impl::token_id::TokenIdServer;
+    fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
+        Self::Server { interner: &server_impl::SYMBOL_INTERNER, call_site, def_site, mixed_site }
+    }
+}
+impl ProcMacroSrvSpan for Span {
+    type Server = server_impl::rust_analyzer_span::RaSpanServer;
+    fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
+        Self::Server {
+            interner: &server_impl::SYMBOL_INTERNER,
+            call_site,
+            def_site,
+            mixed_site,
+            tracked_env_vars: Default::default(),
+            tracked_paths: Default::default(),
+        }
+    }
+}
+
 fn expand_id(
-    task: msg::ExpandMacro,
+    msg::ExpandMacroData {
+        macro_body,
+        macro_name,
+        attributes,
+        has_global_spans: ExpnGlobals { serialize: _, def_site, call_site, mixed_site },
+        span_data_table: _,
+    }: msg::ExpandMacroData,
     expander: &dylib::Expander,
-    def_site: usize,
-    call_site: usize,
-    mixed_site: usize,
 ) -> Result<msg::FlatTree, String> {
     let def_site = TokenId(def_site as u32);
     let call_site = TokenId(call_site as u32);
     let mixed_site = TokenId(mixed_site as u32);

-    let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION);
-    let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION));
+    let macro_body = macro_body.to_subtree_unresolved(CURRENT_API_VERSION);
+    let attributes = attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION));

     let result = thread::scope(|s| {
         let thread = thread::Builder::new()
             .stack_size(EXPANDER_STACK_SIZE)
-            .name(task.macro_name.clone())
+            .name(macro_name.clone())
             .spawn_scoped(s, || {
                 expander
-                    .expand(
-                        &task.macro_name,
-                        macro_body,
-                        attributes,
-                        def_site,
-                        call_site,
-                        mixed_site,
-                    )
+                    .expand(&macro_name, macro_body, attributes, def_site, call_site, mixed_site)
                     .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION))
             });
         let res = match thread {

@@ -212,35 +186,31 @@ fn expand_id(
 }

 fn expand_ra_span(
-    task: msg::ExpandMacro,
+    msg::ExpandMacroData {
+        macro_body,
+        macro_name,
+        attributes,
+        has_global_spans: ExpnGlobals { serialize: _, def_site, call_site, mixed_site },
+        span_data_table,
+    }: msg::ExpandMacroData,
     expander: &dylib::Expander,
-    def_site: usize,
-    call_site: usize,
-    mixed_site: usize,
 ) -> Result<(msg::FlatTree, Vec<u32>), String> {
-    let mut span_data_table = deserialize_span_data_index_map(&task.span_data_table);
+    let mut span_data_table = deserialize_span_data_index_map(&span_data_table);

     let def_site = span_data_table[def_site];
     let call_site = span_data_table[call_site];
     let mixed_site = span_data_table[mixed_site];
-    let macro_body = task.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table);
+    let macro_body = macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table);
     let attributes =
-        task.attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
+        attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));

     let result = thread::scope(|s| {
         let thread = thread::Builder::new()
             .stack_size(EXPANDER_STACK_SIZE)
-            .name(task.macro_name.clone())
+            .name(macro_name.clone())
             .spawn_scoped(s, || {
                 expander
-                    .expand(
-                        &task.macro_name,
-                        macro_body,
-                        attributes,
-                        def_site,
-                        call_site,
-                        mixed_site,
-                    )
+                    .expand(&macro_name, macro_body, attributes, def_site, call_site, mixed_site)
                     .map(|it| {
                         (
                             msg::FlatTree::new(&it, CURRENT_API_VERSION, &mut span_data_table),

@@ -271,31 +241,74 @@ impl PanicMessage {
     }
 }

-struct EnvSnapshot {
+pub struct EnvSnapshot {
     vars: HashMap<OsString, OsString>,
 }

 impl EnvSnapshot {
-    fn new() -> EnvSnapshot {
+    pub fn new() -> EnvSnapshot {
         EnvSnapshot { vars: env::vars_os().collect() }
     }
+}
+
+struct EnvChange<'snap> {
+    changed_vars: Vec<String>,
+    prev_working_dir: Option<PathBuf>,
+    snap: &'snap EnvSnapshot,
+}
+
+impl<'snap> EnvChange<'snap> {
+    fn apply(
+        snap: &'snap EnvSnapshot,
+        new_vars: Vec<(String, String)>,
+        current_dir: Option<&Path>,
+    ) -> EnvChange<'snap> {
+        let prev_working_dir = match current_dir {
+            Some(dir) => {
+                let prev_working_dir = std::env::current_dir().ok();
+                if let Err(err) = std::env::set_current_dir(dir) {
+                    eprintln!(
+                        "Failed to set the current working dir to {}. Error: {err:?}",
+                        dir.display()
+                    )
+                }
+                prev_working_dir
+            }
+            None => None,
+        };
+        EnvChange {
+            snap,
+            changed_vars: new_vars
+                .into_iter()
+                .map(|(k, v)| {
+                    env::set_var(&k, v);
+                    k
+                })
+                .collect(),
+            prev_working_dir,
+        }
+    }

     fn rollback(self) {}
 }

-impl Drop for EnvSnapshot {
+impl Drop for EnvChange<'_> {
     fn drop(&mut self) {
-        for (name, value) in env::vars_os() {
-            let old_value = self.vars.remove(&name);
-            if old_value != Some(value) {
-                match old_value {
-                    None => env::remove_var(name),
-                    Some(old_value) => env::set_var(name, old_value),
-                }
-            }
-        }
-        for (name, old_value) in self.vars.drain() {
-            env::set_var(name, old_value)
+        for name in self.changed_vars.drain(..) {
+            match self.snap.vars.get::<std::ffi::OsStr>(name.as_ref()) {
+                Some(prev_val) => env::set_var(name, prev_val),
+                None => env::remove_var(name),
+            }
+        }
+
+        if let Some(dir) = &self.prev_working_dir {
+            if let Err(err) = std::env::set_current_dir(&dir) {
+                eprintln!(
+                    "Failed to set the current working dir to {}. Error: {:?}",
+                    dir.display(),
+                    err
+                )
+            }
         }
     }
 }
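The environment juggling during expansion is now an RAII guard: EnvChange::apply records which variables it touched and restores them (and the working directory) from the pre-recorded EnvSnapshot when dropped. A minimal standalone sketch of the same guard idea, with made-up names and without the working-directory part:

use std::collections::HashMap;
use std::env;
use std::ffi::OsString;

struct VarGuard {
    // Original value of each variable we touched; None means it was unset.
    original: HashMap<String, Option<OsString>>,
}

impl VarGuard {
    fn apply(vars: &[(String, String)]) -> VarGuard {
        let mut original = HashMap::new();
        for (k, v) in vars {
            original.entry(k.clone()).or_insert_with(|| env::var_os(k));
            env::set_var(k, v);
        }
        VarGuard { original }
    }
}

impl Drop for VarGuard {
    fn drop(&mut self) {
        for (k, old) in self.original.drain() {
            match old {
                Some(val) => env::set_var(&k, val),
                None => env::remove_var(&k),
            }
        }
    }
}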

View file

@@ -1,9 +1,10 @@
 //! Proc macro ABI

-use libloading::Library;
 use proc_macro::bridge;
 use proc_macro_api::ProcMacroKind;
+
+use libloading::Library;

 use crate::{dylib::LoadProcMacroDylibError, ProcMacroSrvSpan};

 pub(crate) struct ProcMacros {

@@ -49,10 +50,11 @@ impl ProcMacros {
         call_site: S,
         mixed_site: S,
     ) -> Result<tt::Subtree<S>, crate::PanicMessage> {
-        let parsed_body = crate::server::TokenStream::with_subtree(macro_body);
+        let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body);

-        let parsed_attributes = attributes.map_or_else(crate::server::TokenStream::new, |attr| {
-            crate::server::TokenStream::with_subtree(attr)
-        });
+        let parsed_attributes = attributes
+            .map_or_else(crate::server_impl::TokenStream::new, |attr| {
+                crate::server_impl::TokenStream::with_subtree(attr)
+            });

         for proc_macro in &self.exported_macros {

View file

@@ -14,7 +14,7 @@ use proc_macro::bridge::{self, server};
 use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER};
 use tt::{TextRange, TextSize};

-use crate::server::{
+use crate::server_impl::{
     delim_to_external, delim_to_internal, literal_with_stringify_parts,
     token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER,
 };

@@ -29,7 +29,7 @@ mod tt {
     pub type Ident = ::tt::Ident<super::Span>;
 }

-type TokenStream = crate::server::TokenStream<Span>;
+type TokenStream = crate::server_impl::TokenStream<Span>;

 #[derive(Clone)]
 pub struct SourceFile;

View file

@@ -7,7 +7,7 @@ use std::{
 use proc_macro::bridge::{self, server};

-use crate::server::{
+use crate::server_impl::{
     delim_to_external, delim_to_internal, literal_with_stringify_parts,
     token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER,
 };

@@ -31,7 +31,7 @@ type Spacing = tt::Spacing;
 #[allow(unused)]
 type Literal = tt::Literal;
 type Span = tt::TokenId;
-type TokenStream = crate::server::TokenStream<Span>;
+type TokenStream = crate::server_impl::TokenStream<Span>;

 #[derive(Clone)]
 pub struct SourceFile;

View file

@@ -5,10 +5,10 @@ use proc_macro_api::msg::TokenId;
 use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId};
 use tt::TextRange;

-use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
+use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv};

-fn parse_string(call_site: TokenId, src: &str) -> crate::server::TokenStream<TokenId> {
-    crate::server::TokenStream::with_subtree(
+fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> {
+    crate::server_impl::TokenStream::with_subtree(
         mbe::parse_to_token_tree_static_span(call_site, src).unwrap(),
     )
 }

@@ -17,8 +17,8 @@ fn parse_string_spanned(
     anchor: SpanAnchor,
     call_site: SyntaxContextId,
     src: &str,
-) -> crate::server::TokenStream<Span> {
-    crate::server::TokenStream::with_subtree(
+) -> crate::server_impl::TokenStream<Span> {
+    crate::server_impl::TokenStream::with_subtree(
         mbe::parse_to_token_tree(anchor, call_site, src).unwrap(),
     )
 }

@@ -96,7 +96,8 @@ fn assert_expand_impl(
 pub(crate) fn list() -> Vec<String> {
     let dylib_path = proc_macro_test_dylib_path();
-    let mut srv = ProcMacroSrv::default();
+    let env = EnvSnapshot::new();
+    let mut srv = ProcMacroSrv::new(&env);
     let res = srv.list_macros(&dylib_path).unwrap();
     res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect()
 }

View file

@@ -1,7 +1,5 @@
 //! A collection of tools for profiling rust-analyzer.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #[cfg(feature = "cpu_profiler")]
 mod google_cpu_profiler;
 mod memory_usage;

View file

@@ -15,8 +15,6 @@
 //! procedural macros).
 //! * Lowering of concrete model to a [`base_db::CrateGraph`]
-#![warn(rust_2018_idioms, unused_lifetimes)]
 mod build_scripts;
 mod cargo_workspace;
 mod cfg;

View file

@@ -2,7 +2,6 @@
 //!
 //! Based on cli flags, either spawns an LSP server, or runs a batch analysis
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![allow(clippy::print_stdout, clippy::print_stderr)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

View file

@@ -9,8 +9,6 @@
 //! The `cli` submodule implements some batch-processing analysis, primarily as
 //! a debugging aid.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 pub mod cli;
 mod caps;

View file

@@ -529,7 +529,7 @@ impl GlobalState {
             None => ws.find_sysroot_proc_macro_srv()?,
         };

-        let env = match &ws.kind {
+        let env: FxHashMap<_, _> = match &ws.kind {
             ProjectWorkspaceKind::Cargo { cargo_config_extra_env, .. }
             | ProjectWorkspaceKind::DetachedFile {
                 cargo: Some(_),

View file

@@ -8,7 +8,6 @@
 //! specific JSON shapes here -- there's little value in such tests, as we can't
 //! be sure without a real client anyway.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![allow(clippy::disallowed_types)]

 mod ratoml;

View file

@@ -6,8 +6,6 @@
 //!
 //! This crate contains utilities to make this kind of source-gen easy.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 use std::{
     fmt, fs, mem,
     path::{Path, PathBuf},

View file

@@ -1,7 +1,5 @@
 //! Missing batteries for standard libraries.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 use std::io as sio;
 use std::process::Command;
 use std::{cmp::Ordering, ops, time::Instant};

View file

@@ -20,7 +20,6 @@
 //! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #[cfg(not(feature = "in-rust-tree"))]
 extern crate ra_ap_rustc_lexer as rustc_lexer;

View file

@@ -6,7 +6,6 @@
 //! * Extracting markup (mainly, `$0` markers) out of fixture strings.
 //! * marks (see the eponymous module).
-#![warn(rust_2018_idioms, unused_lifetimes)]
 #![allow(clippy::print_stderr)]

 mod assert_linear;

View file

@@ -4,8 +4,6 @@
 //! so `TextEdit` is the ultimate representation of the work done by
 //! rust-analyzer.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 use itertools::Itertools;
 use std::cmp::max;
 pub use text_size::{TextRange, TextSize};

View file

@@ -1,7 +1,5 @@
 //! Discovery of `cargo` & `rustc` executables.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 use std::{env, iter, path::PathBuf};

 use camino::{Utf8Path, Utf8PathBuf};

View file

@@ -2,8 +2,6 @@
 //! input and output) of macros. It closely mirrors `proc_macro` crate's
 //! `TokenTree`.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 pub mod buffer;
 pub mod iter;

View file

@@ -7,8 +7,6 @@
 //! Hopefully, one day a reliable file watching/walking crate appears on
 //! crates.io, and we can reduce this to trivial glue code.
-#![warn(rust_2018_idioms, unused_lifetimes)]
 use std::{
     fs,
     path::{Component, Path},

View file

@@ -38,8 +38,6 @@
 //! [`Handle`]: loader::Handle
 //! [`Entries`]: loader::Entry
-#![warn(rust_2018_idioms, unused_lifetimes)]
 mod anchored_path;
 pub mod file_set;
 pub mod loader;