Auto merge of #17520 - Veykril:slim-proc-macro-api, r=Veykril

internal: Cleanup proc-macro-srv some more

Commit ea7fdada6a: 53 changed files with 352 additions and 349 deletions

Cargo.lock (generated)

@@ -1337,7 +1337,6 @@ dependencies = [
"stdx",
"text-size",
"tracing",
"triomphe",
"tt",
]

@@ -1,7 +1,5 @@
//! base_db defines basic database traits. The concrete DB is defined by ide.

#![warn(rust_2018_idioms, unused_lifetimes)]

mod change;
mod input;

@@ -1,7 +1,5 @@
//! cfg defines conditional compiling options, `cfg` attribute parser and evaluator

#![warn(rust_2018_idioms, unused_lifetimes)]

mod cfg_expr;
mod dnf;
#[cfg(test)]

@@ -6,8 +6,6 @@
// addition to `cargo check`. Either split it into 3 crates (one for test, one for check
// and one common utilities) or change its name and docs to reflect the current state.

#![warn(rust_2018_idioms, unused_lifetimes)]

use std::{fmt, io, process::Command, time::Duration};

use crossbeam_channel::{never, select, unbounded, Receiver, Sender};

@@ -7,7 +7,6 @@
//! Note that `hir_def` is a work in progress, so not all of the above is
//! actually true.

#![warn(rust_2018_idioms, unused_lifetimes)]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

#[cfg(feature = "in-rust-tree")]

@@ -4,7 +4,6 @@
//! tree originates not from the text of some `FileId`, but from some macro
//! expansion.
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![warn(rust_2018_idioms, unused_lifetimes)]

pub mod attrs;
pub mod builtin_attr_macro;

@@ -1,6 +1,6 @@
//! The type system. We currently use this to infer types for completion, hover
//! information and various assists.
#![warn(rust_2018_idioms, unused_lifetimes)]

#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

#[cfg(feature = "in-rust-tree")]

@@ -17,7 +17,6 @@
//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
//! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.

#![warn(rust_2018_idioms, unused_lifetimes)]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![recursion_limit = "512"]

@@ -58,8 +58,6 @@
//! See also this post:
//! <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>

#![warn(rust_2018_idioms, unused_lifetimes)]

mod assist_config;
mod assist_context;
#[cfg(test)]

@@ -1,7 +1,5 @@
//! `completions` crate provides utilities for generating completions of user input.

#![warn(rust_2018_idioms, unused_lifetimes)]

mod completions;
mod config;
mod context;

@@ -2,8 +2,6 @@
//!
//! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search.

#![warn(rust_2018_idioms, unused_lifetimes)]

mod apply_change;

pub mod active_parameter;

@@ -23,8 +23,6 @@
//! There are also a couple of ad-hoc diagnostics implemented directly here, we
//! don't yet have a great pattern for how to do them properly.

#![warn(rust_2018_idioms, unused_lifetimes)]

mod handlers {
pub(crate) mod break_outside_of_loop;
pub(crate) mod expected_function;

@@ -3,8 +3,6 @@
//! Allows searching the AST for code that matches one or more patterns and then replacing that code
//! based on a template.

#![warn(rust_2018_idioms, unused_lifetimes)]

// Feature: Structural Search and Replace
//
// Search and replace with named wildcards that will match any expression, type, path, pattern or item.

@@ -8,7 +8,7 @@
//! in this crate.

// For proving that RootDatabase is RefUnwindSafe.
#![warn(rust_2018_idioms, unused_lifetimes)]

#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![recursion_limit = "128"]

@@ -1,7 +1,5 @@
//! limit defines a struct to enforce limits.

#![warn(rust_2018_idioms, unused_lifetimes)]

#[cfg(feature = "tracking")]
use std::sync::atomic::AtomicUsize;

@@ -6,8 +6,6 @@
//! The tests for this functionality live in another crate:
//! `hir_def::macro_expansion_tests::mbe`.

#![warn(rust_2018_idioms, unused_lifetimes)]

mod expander;
mod parser;
mod syntax_bridge;

@@ -17,7 +17,6 @@
//!
//! [`Parser`]: crate::parser::Parser

#![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(rustdoc::private_intra_doc_links)]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

@@ -1,8 +1,6 @@
//! Thin wrappers around `std::path`/`camino::path`, distinguishing between absolute and
//! relative paths.

#![warn(rust_2018_idioms, unused_lifetimes)]

use std::{
borrow::Borrow,
ffi::OsStr,

@@ -15,7 +15,6 @@ doctest = false
serde.workspace = true
serde_json = { workspace = true, features = ["unbounded_depth"] }
tracing.workspace = true
triomphe.workspace = true
rustc-hash.workspace = true
indexmap.workspace = true

crates/proc-macro-api/src/json.rs (new file, 35 lines)

@@ -0,0 +1,35 @@
//! Protocol functions for json.
use std::io::{self, BufRead, Write};

pub fn read_json<'a>(
inp: &mut impl BufRead,
buf: &'a mut String,
) -> io::Result<Option<&'a String>> {
loop {
buf.clear();

inp.read_line(buf)?;
buf.pop(); // Remove trailing '\n'

if buf.is_empty() {
return Ok(None);
}

// Some ill behaved macro try to use stdout for debugging
// We ignore it here
if !buf.starts_with('{') {
tracing::error!("proc-macro tried to print : {}", buf);
continue;
}

return Ok(Some(buf));
}
}

pub fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
tracing::debug!("> {}", msg);
out.write_all(msg.as_bytes())?;
out.write_all(b"\n")?;
out.flush()?;
Ok(())
}
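
The new `json.rs` module isolates the newline-delimited JSON framing that used to live in `msg.rs`. A minimal sketch of how the two functions pair up, assuming access to `proc_macro_api::json`; the payload string is an arbitrary JSON line, not a real protocol message:

```rust
use std::io::Cursor;

use proc_macro_api::json::{read_json, write_json};

fn roundtrip() -> std::io::Result<()> {
    // write_json appends a '\n' and flushes, so each message is one line.
    let mut wire = Vec::new();
    write_json(&mut wire, r#"{"ping":true}"#)?;

    // read_json strips the trailing '\n' and skips non-JSON noise that a
    // misbehaving proc macro may have printed to stdout.
    let mut reader = Cursor::new(wire);
    let mut buf = String::new();
    if let Some(msg) = read_json(&mut reader, &mut buf)? {
        assert_eq!(msg, r#"{"ping":true}"#);
    }
    Ok(())
}
```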

@@ -5,27 +5,23 @@
//! is used to provide basic infrastructure for communication between two
//! processes: Client (RA itself), Server (the external program)

#![warn(rust_2018_idioms, unused_lifetimes)]

pub mod json;
pub mod msg;
mod process;

use base_db::Env;
use indexmap::IndexSet;
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
use span::Span;
use std::{
fmt, io,
sync::{Arc, Mutex},
};
use std::{fmt, io, sync::Arc};
use tt::SmolStr;

use serde::{Deserialize, Serialize};

use crate::{
msg::{
deserialize_span_data_index_map, flat::serialize_span_data_index_map, ExpandMacro,
ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT,
ExpnGlobals, FlatTree, PanicMessage, SpanDataIndexMap, HAS_GLOBAL_SPANS,
RUST_ANALYZER_SPAN_SUPPORT,
},
process::ProcMacroProcessSrv,
};

@@ -48,9 +44,7 @@ pub struct ProcMacroServer {
///
/// That means that concurrent salsa requests may block each other when expanding proc macros,
/// which is unfortunate, but simple and good enough for the time being.
///
/// Therefore, we just wrap the `ProcMacroProcessSrv` in a mutex here.
process: Arc<Mutex<ProcMacroProcessSrv>>,
process: Arc<ProcMacroProcessSrv>,
path: AbsPathBuf,
}

@@ -70,9 +64,9 @@ impl MacroDylib {
/// we share a single expander process for all macros.
#[derive(Debug, Clone)]
pub struct ProcMacro {
process: Arc<Mutex<ProcMacroProcessSrv>>,
dylib_path: AbsPathBuf,
name: String,
process: Arc<ProcMacroProcessSrv>,
dylib_path: Arc<AbsPathBuf>,
name: SmolStr,
kind: ProcMacroKind,
}

@@ -81,7 +75,7 @@ impl PartialEq for ProcMacro {
fn eq(&self, other: &Self) -> bool {
self.name == other.name
&& self.kind == other.kind
&& self.dylib_path == other.dylib_path
&& Arc::ptr_eq(&self.dylib_path, &other.dylib_path)
&& Arc::ptr_eq(&self.process, &other.process)
}
}

@@ -89,7 +83,6 @@ impl PartialEq for ProcMacro {
#[derive(Clone, Debug)]
pub struct ServerError {
pub message: String,
// io::Error isn't Clone for some reason
pub io: Option<Arc<io::Error>>,
}

@@ -104,21 +97,15 @@ impl fmt::Display for ServerError {
}
}

pub struct MacroPanic {
pub message: String,
}

impl ProcMacroServer {
/// Spawns an external process as the proc macro server and returns a client connected to it.
pub fn spawn(
process_path: &AbsPath,
env: &FxHashMap<String, String>,
env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>
+ Clone,
) -> io::Result<ProcMacroServer> {
let process = ProcMacroProcessSrv::run(process_path, env)?;
Ok(ProcMacroServer {
process: Arc::new(Mutex::new(process)),
path: process_path.to_owned(),
})
Ok(ProcMacroServer { process: Arc::new(process), path: process_path.to_owned() })
}

pub fn path(&self) -> &AbsPath {
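
With the new `spawn` signature the caller no longer has to build a borrowed `FxHashMap`; any cloneable iterator of key/value pairs that deref to `OsStr` is accepted. A hedged sketch of a call site (the server path is supplied by the caller, not shown here):

```rust
use paths::AbsPath;
use proc_macro_api::ProcMacroServer;

// `srv_path` would point at the rust-analyzer-proc-macro-srv binary.
fn connect(srv_path: &AbsPath) -> std::io::Result<ProcMacroServer> {
    // A slice of string pairs works directly: `.iter().copied()` yields
    // (&str, &str), both of which implement AsRef<OsStr>, and the iterator
    // itself is Clone as the new bound requires.
    let env = [("RUST_BACKTRACE", "short"), ("CARGO_TERM_COLOR", "never")];
    ProcMacroServer::spawn(srv_path, env.iter().copied())
}
```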

@@ -127,17 +114,17 @@ impl ProcMacroServer {

pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered();
let macros =
self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?;
let macros = self.process.find_proc_macros(&dylib.path)?;

let dylib_path = Arc::new(dylib.path);
match macros {
Ok(macros) => Ok(macros
.into_iter()
.map(|(name, kind)| ProcMacro {
process: self.process.clone(),
name,
name: name.into(),
kind,
dylib_path: dylib.path.clone(),
dylib_path: dylib_path.clone(),
})
.collect()),
Err(message) => Err(ServerError { message, io: None }),

@@ -163,20 +150,19 @@ impl ProcMacro {
call_site: Span,
mixed_site: Span,
) -> Result<Result<tt::Subtree<Span>, PanicMessage>, ServerError> {
let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version();
let version = self.process.version();
let current_dir = env.get("CARGO_MANIFEST_DIR");

let mut span_data_table = IndexSet::default();
let mut span_data_table = SpanDataIndexMap::default();
let def_site = span_data_table.insert_full(def_site).0;
let call_site = span_data_table.insert_full(call_site).0;
let mixed_site = span_data_table.insert_full(mixed_site).0;
let task = ExpandMacro {
data: msg::ExpandMacroData {
macro_body: FlatTree::new(subtree, version, &mut span_data_table),
macro_name: self.name.to_string(),
attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
lib: self.dylib_path.to_path_buf().into(),
env: env.into(),
current_dir,
attributes: attr
.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
has_global_spans: ExpnGlobals {
serialize: version >= HAS_GLOBAL_SPANS,
def_site,

@@ -188,13 +174,13 @@ impl ProcMacro {
} else {
Vec::new()
},
},
lib: self.dylib_path.to_path_buf().into(),
env: env.into(),
current_dir,
};

let response = self
.process
.lock()
.unwrap_or_else(|e| e.into_inner())
.send_task(msg::Request::ExpandMacro(Box::new(task)))?;
let response = self.process.send_task(msg::Request::ExpandMacro(Box::new(task)))?;

match response {
msg::Response::ExpandMacro(it) => {

@@ -72,6 +72,16 @@ pub struct PanicMessage(pub String);

#[derive(Debug, Serialize, Deserialize)]
pub struct ExpandMacro {
pub lib: Utf8PathBuf,
/// Environment variables to set during macro expansion.
pub env: Vec<(String, String)>,
pub current_dir: Option<String>,
#[serde(flatten)]
pub data: ExpandMacroData,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ExpandMacroData {
/// Argument of macro call.
///
/// In custom derive this will be a struct or enum; in attribute-like macro - underlying
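
Because `ExpandMacroData` is embedded with `#[serde(flatten)]`, the wire format keeps its old flat field layout even though the Rust type was split in two. A simplified sketch with stand-in types (not the real `ExpandMacro`/`ExpandMacroData`) showing roughly what flattening does to the JSON:

```rust
use serde::Serialize;

#[derive(Serialize)]
struct Envelope {
    lib: String,
    env: Vec<(String, String)>,
    #[serde(flatten)]
    data: Payload,
}

#[derive(Serialize)]
struct Payload {
    macro_name: String,
}

fn main() {
    let msg = Envelope {
        lib: "/tmp/libmacros.so".into(),
        env: vec![],
        data: Payload { macro_name: "derive_demo".into() },
    };
    // The flattened field shows up at the top level, next to `lib` and `env`,
    // so older peers that expect a flat message still parse it; roughly:
    // {"lib":"/tmp/libmacros.so","env":[],"macro_name":"derive_demo"}
    println!("{}", serde_json::to_string(&msg).unwrap());
}
```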

@@ -86,13 +96,6 @@ pub struct ExpandMacro {

/// Possible attributes for the attribute-like macros.
pub attributes: Option<FlatTree>,

pub lib: Utf8PathBuf,

/// Environment variables to set during macro expansion.
pub env: Vec<(String, String)>,

pub current_dir: Option<String>,
/// marker for serde skip stuff
#[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
#[serde(default)]

@@ -119,8 +122,12 @@ impl ExpnGlobals {
}

pub trait Message: Serialize + DeserializeOwned {
fn read(inp: &mut impl BufRead, buf: &mut String) -> io::Result<Option<Self>> {
Ok(match read_json(inp, buf)? {
fn read<R: BufRead>(
from_proto: ProtocolRead<R>,
inp: &mut R,
buf: &mut String,
) -> io::Result<Option<Self>> {
Ok(match from_proto(inp, buf)? {
None => None,
Some(text) => {
let mut deserializer = serde_json::Deserializer::from_str(text);

@@ -131,44 +138,20 @@ pub trait Message: Serialize + DeserializeOwned {
}
})
}
fn write(self, out: &mut impl Write) -> io::Result<()> {
fn write<W: Write>(self, to_proto: ProtocolWrite<W>, out: &mut W) -> io::Result<()> {
let text = serde_json::to_string(&self)?;
write_json(out, &text)
to_proto(out, &text)
}
}

impl Message for Request {}
impl Message for Response {}

fn read_json<'a>(inp: &mut impl BufRead, buf: &'a mut String) -> io::Result<Option<&'a String>> {
loop {
buf.clear();

inp.read_line(buf)?;
buf.pop(); // Remove trailing '\n'

if buf.is_empty() {
return Ok(None);
}

// Some ill behaved macro try to use stdout for debugging
// We ignore it here
if !buf.starts_with('{') {
tracing::error!("proc-macro tried to print : {}", buf);
continue;
}

return Ok(Some(buf));
}
}

fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
tracing::debug!("> {}", msg);
out.write_all(msg.as_bytes())?;
out.write_all(b"\n")?;
out.flush()?;
Ok(())
}
#[allow(type_alias_bounds)]
type ProtocolRead<R: BufRead> =
for<'i, 'buf> fn(inp: &'i mut R, buf: &'buf mut String) -> io::Result<Option<&'buf String>>;
#[allow(type_alias_bounds)]
type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str) -> io::Result<()>;

#[cfg(test)]
mod tests {
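
`Message::read`/`write` are now parameterized by plain function pointers matching `ProtocolRead`/`ProtocolWrite`, so the JSON framing lives in `json.rs` and callers inject it, mirroring what `send_request` in `process.rs` does. A hedged sketch of a caller driving the trait:

```rust
use std::io::{BufRead, Write};

use proc_macro_api::{
    json::{read_json, write_json},
    msg::{Message, Request, Response},
};

// The framing is passed in as fn pointers, so a different
// ProtocolRead/ProtocolWrite pair could be swapped in without touching
// the Message trait itself.
fn roundtrip<W: Write, R: BufRead>(
    req: Request,
    writer: &mut W,
    reader: &mut R,
) -> std::io::Result<Option<Response>> {
    req.write(write_json, writer)?;
    let mut buf = String::new();
    Response::read(read_json, reader, &mut buf)
}
```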

@@ -268,12 +251,10 @@ mod tests {
let tt = fixture_token_tree();
let mut span_data_table = Default::default();
let task = ExpandMacro {
data: ExpandMacroData {
macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
macro_name: Default::default(),
attributes: None,
lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(),
env: Default::default(),
current_dir: Default::default(),
has_global_spans: ExpnGlobals {
serialize: true,
def_site: 0,

@@ -281,12 +262,19 @@ mod tests {
mixed_site: 0,
},
span_data_table: Vec::new(),
},
lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(),
env: Default::default(),
current_dir: Default::default(),
};

let json = serde_json::to_string(&task).unwrap();
// println!("{}", json);
let back: ExpandMacro = serde_json::from_str(&json).unwrap();

assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
assert_eq!(
tt,
back.data.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)
);
}
}

@@ -37,7 +37,6 @@

use std::collections::VecDeque;

use indexmap::IndexSet;
use la_arena::RawIdx;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};

@@ -46,7 +45,8 @@ use text_size::TextRange;

use crate::msg::ENCODE_CLOSE_SPAN_VERSION;

pub type SpanDataIndexMap = IndexSet<Span>;
pub type SpanDataIndexMap =
indexmap::IndexSet<Span, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;

pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec<u32> {
map.iter()
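
The table is an insertion-ordered `IndexSet`, so interning the same span twice yields the same index, and that index is what travels over the wire. A small sketch, assuming `SpanDataIndexMap` is reachable through `proc_macro_api::msg` as the imports above suggest:

```rust
use proc_macro_api::msg::SpanDataIndexMap;

// `span` stands for any span::Span value the caller already has (Span is Copy).
fn intern_twice(span: span::Span) {
    let mut table = SpanDataIndexMap::default();
    let first = table.insert_full(span).0;
    let again = table.insert_full(span).0;
    // Re-inserting hands back the original index; that index is later used
    // to look the span data up again on the other side.
    assert_eq!(first, again);
    assert_eq!(table.get_index(first), Some(&span));
}
```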

@@ -328,7 +328,7 @@ impl InternableSpan for TokenId {
}
}
impl InternableSpan for Span {
type Table = IndexSet<Span>;
type Table = SpanDataIndexMap;
fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId {
TokenId(table.insert_full(span).0 as u32)
}

@@ -3,43 +3,48 @@
use std::{
io::{self, BufRead, BufReader, Read, Write},
process::{Child, ChildStdin, ChildStdout, Command, Stdio},
sync::Arc,
sync::{Arc, Mutex},
};

use paths::AbsPath;
use rustc_hash::FxHashMap;
use stdx::JodChild;

use crate::{
json::{read_json, write_json},
msg::{Message, Request, Response, SpanMode, CURRENT_API_VERSION, RUST_ANALYZER_SPAN_SUPPORT},
ProcMacroKind, ServerError,
};

#[derive(Debug)]
pub(crate) struct ProcMacroProcessSrv {
/// The state of the proc-macro server process, the protocol is currently strictly sequential
/// hence the lock on the state.
state: Mutex<ProcessSrvState>,
version: u32,
mode: SpanMode,
}

#[derive(Debug)]
struct ProcessSrvState {
process: Process,
stdin: ChildStdin,
stdout: BufReader<ChildStdout>,
/// Populated when the server exits.
server_exited: Option<ServerError>,
version: u32,
mode: SpanMode,
}

impl ProcMacroProcessSrv {
pub(crate) fn run(
process_path: &AbsPath,
env: &FxHashMap<String, String>,
env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>
+ Clone,
) -> io::Result<ProcMacroProcessSrv> {
let create_srv = |null_stderr| {
let mut process = Process::run(process_path, env, null_stderr)?;
let mut process = Process::run(process_path, env.clone(), null_stderr)?;
let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");

io::Result::Ok(ProcMacroProcessSrv {
process,
stdin,
stdout,
server_exited: None,
state: Mutex::new(ProcessSrvState { process, stdin, stdout, server_exited: None }),
version: 0,
mode: SpanMode::Id,
})
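
The mutable channel state now sits behind a `Mutex` inside `ProcMacroProcessSrv`, so callers can share it through a plain `Arc` and every request method takes `&self`; the per-request lock is what keeps the strictly sequential protocol safe. A condensed sketch of that pattern with stand-in types (not the real ones):

```rust
use std::sync::{Arc, Mutex};

// `State` plays the role of ProcessSrvState (the child's stdin/stdout),
// `Srv` the role of ProcMacroProcessSrv.
struct State { counter: u32 }

struct Srv {
    version: u32,
    state: Mutex<State>,
}

impl Srv {
    fn version(&self) -> u32 {
        self.version // read-only config needs no lock
    }
    fn send_task(&self) -> u32 {
        // The lock serializes requests, matching the sequential protocol.
        let state = &mut *self.state.lock().unwrap();
        state.counter += 1;
        state.counter
    }
}

fn main() {
    let srv = Arc::new(Srv { version: 4, state: Mutex::new(State { counter: 0 }) });
    let shared = Arc::clone(&srv);
    let worker = std::thread::spawn(move || shared.send_task());
    let _ = srv.send_task();
    worker.join().unwrap();
    assert_eq!(srv.version(), 4);
}
```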

@@ -76,7 +81,7 @@ impl ProcMacroProcessSrv {
self.version
}

pub(crate) fn version_check(&mut self) -> Result<u32, ServerError> {
fn version_check(&self) -> Result<u32, ServerError> {
let request = Request::ApiVersionCheck {};
let response = self.send_task(request)?;

@@ -86,7 +91,7 @@ impl ProcMacroProcessSrv {
}
}

fn enable_rust_analyzer_spans(&mut self) -> Result<SpanMode, ServerError> {
fn enable_rust_analyzer_spans(&self) -> Result<SpanMode, ServerError> {
let request = Request::SetConfig(crate::msg::ServerConfig {
span_mode: crate::msg::SpanMode::RustAnalyzer,
});

@@ -99,7 +104,7 @@ impl ProcMacroProcessSrv {
}

pub(crate) fn find_proc_macros(
&mut self,
&self,
dylib_path: &AbsPath,
) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() };

@@ -112,20 +117,21 @@ impl ProcMacroProcessSrv {
}
}

pub(crate) fn send_task(&mut self, req: Request) -> Result<Response, ServerError> {
if let Some(server_error) = &self.server_exited {
pub(crate) fn send_task(&self, req: Request) -> Result<Response, ServerError> {
let state = &mut *self.state.lock().unwrap();
if let Some(server_error) = &state.server_exited {
return Err(server_error.clone());
}

let mut buf = String::new();
send_request(&mut self.stdin, &mut self.stdout, req, &mut buf).map_err(|e| {
send_request(&mut state.stdin, &mut state.stdout, req, &mut buf).map_err(|e| {
if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) {
match self.process.child.try_wait() {
match state.process.child.try_wait() {
Ok(None) => e,
Ok(Some(status)) => {
let mut msg = String::new();
if !status.success() {
if let Some(stderr) = self.process.child.stderr.as_mut() {
if let Some(stderr) = state.process.child.stderr.as_mut() {
_ = stderr.read_to_string(&mut msg);
}
}

@@ -133,7 +139,7 @@ impl ProcMacroProcessSrv {
message: format!("server exited with {status}: {msg}"),
io: None,
};
self.server_exited = Some(server_error.clone());
state.server_exited = Some(server_error.clone());
server_error
}
Err(_) => e,

@@ -153,7 +159,7 @@ struct Process {
impl Process {
fn run(
path: &AbsPath,
env: &FxHashMap<String, String>,
env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>,
null_stderr: bool,
) -> io::Result<Process> {
let child = JodChild(mk_child(path, env, null_stderr)?);

@@ -171,7 +177,7 @@ impl Process {

fn mk_child(
path: &AbsPath,
env: &FxHashMap<String, String>,
env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>,
null_stderr: bool,
) -> io::Result<Child> {
let mut cmd = Command::new(path);

@@ -196,11 +202,11 @@ fn send_request(
req: Request,
buf: &mut String,
) -> Result<Response, ServerError> {
req.write(&mut writer).map_err(|err| ServerError {
req.write(write_json, &mut writer).map_err(|err| ServerError {
message: "failed to write request".into(),
io: Some(Arc::new(err)),
})?;
let res = Response::read(&mut reader, buf).map_err(|err| ServerError {
let res = Response::read(read_json, &mut reader, buf).map_err(|err| ServerError {
message: "failed to read response".into(),
io: Some(Arc::new(err)),
})?;

@@ -6,6 +6,8 @@
#[cfg(feature = "in-rust-tree")]
extern crate rustc_driver as _;

use proc_macro_api::json::{read_json, write_json};

use std::io;

fn main() -> std::io::Result<()> {

@@ -26,19 +28,49 @@ fn main() -> std::io::Result<()> {

#[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))]
fn run() -> io::Result<()> {
eprintln!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled");
std::process::exit(70);
let err = "proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function";
eprintln!("{err}");
use proc_macro_api::msg::{self, Message};

let read_request =
|buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf);

let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock());

let mut buf = String::new();

while let Some(req) = read_request(&mut buf)? {
let res = match req {
msg::Request::ListMacros { .. } => msg::Response::ListMacros(Err(err.to_owned())),
msg::Request::ExpandMacro(_) => {
msg::Response::ExpandMacro(Err(msg::PanicMessage(err.to_owned())))
}
msg::Request::ApiVersionCheck {} => {
msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION)
}
msg::Request::SetConfig(_) => {
msg::Response::SetConfig(proc_macro_api::msg::ServerConfig {
span_mode: msg::SpanMode::Id,
})
}
};
write_response(res)?
}
Ok(())
}

#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
fn run() -> io::Result<()> {
use proc_macro_api::msg::{self, Message};
use proc_macro_srv::EnvSnapshot;

let read_request = |buf: &mut String| msg::Request::read(&mut io::stdin().lock(), buf);
let read_request =
|buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf);

let write_response = |msg: msg::Response| msg.write(&mut io::stdout().lock());
let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock());

let mut srv = proc_macro_srv::ProcMacroSrv::default();
let env = EnvSnapshot::new();
let mut srv = proc_macro_srv::ProcMacroSrv::new(&env);
let mut buf = String::new();

while let Some(req) = read_request(&mut buf)? {
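
Both `run` variants now share the same shape: read a framed request from stdin, dispatch on its kind, write a response to stdout, and keep going even when expansion is unavailable. A stripped-down sketch of that loop with stand-in types (the real code deserializes `msg::Request` via the injected `read_json`):

```rust
use std::io::{self, BufRead, Write};

// Stand-ins for msg::Request / msg::Response, just to show the loop shape.
enum Request { ApiVersionCheck, Expand(String) }
enum Response { ApiVersionCheck(u32), Error(String) }

fn serve(input: impl BufRead, mut output: impl Write) -> io::Result<()> {
    let err = "expansion not supported in this build";
    for line in input.lines() {
        let line = line?;
        // A real server would deserialize `line` into a Request here.
        let req = if line.contains("ApiVersionCheck") {
            Request::ApiVersionCheck
        } else {
            Request::Expand(line)
        };
        // Even the feature-less build keeps answering, reporting the error
        // in-band instead of exiting the whole process.
        let res = match req {
            Request::ApiVersionCheck => Response::ApiVersionCheck(4), // placeholder version
            Request::Expand(_) => Response::Error(err.to_owned()),
        };
        match res {
            Response::ApiVersionCheck(v) => writeln!(output, "{{\"ApiVersionCheck\":{v}}}")?,
            Response::Error(e) => writeln!(output, "{{\"Error\":{e:?}}}")?,
        }
    }
    Ok(())
}

fn main() -> io::Result<()> {
    serve(io::stdin().lock(), io::stdout())
}
```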

@@ -1,27 +1,15 @@
//! Determine rustc version `proc-macro-srv` (and thus the sysroot ABI) is
//! build with and make it accessible at runtime for ABI selection.

use std::{env, fs::File, io::Write, path::PathBuf, process::Command};
use std::{env, process::Command};

fn main() {
println!("cargo:rustc-check-cfg=cfg(rust_analyzer)");

let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
path.push("rustc_version.rs");
let mut f = File::create(&path).unwrap();
println!("cargo::rustc-check-cfg=cfg(rust_analyzer)");

let rustc = env::var("RUSTC").expect("proc-macro-srv's build script expects RUSTC to be set");
let output = Command::new(rustc).arg("--version").output().expect("rustc --version must run");
let version_string = std::str::from_utf8(&output.stdout[..])
.expect("rustc --version output must be UTF-8")
.trim();

write!(
f,
"
#[allow(dead_code)]
pub(crate) const RUSTC_VERSION_STRING: &str = {version_string:?};
"
)
.unwrap();
println!("cargo::rustc-env=RUSTC_VERSION={}", version_string);
}
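
Instead of writing a `rustc_version.rs` file into `OUT_DIR` and `include!`-ing it, the build script now exports the value through `cargo::rustc-env`, and the crate reads it back with `env!` at compile time. A minimal sketch of that pattern outside this repository (the version string is a placeholder):

```rust
// build.rs
fn main() {
    // Computed at build time; a placeholder value here.
    let version = "rustc 1.80.0 (example)";
    // Exposes RUSTC_VERSION to the compilation of the crate itself.
    println!("cargo::rustc-env=RUSTC_VERSION={version}");
}

// In the crate (shown as a comment for illustration only):
// pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION");
```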

@@ -8,7 +8,7 @@
//! 1.58) and future ABIs (stage1, nightly)

use std::{
env, fs,
env,
path::{Path, PathBuf},
process::Command,
};

@@ -30,8 +30,7 @@ fn main() {

if !has_features {
println!("proc-macro-test testing only works on nightly toolchains");
let info_path = out_dir.join("proc_macro_test_location.txt");
fs::File::create(info_path).unwrap();
println!("cargo::rustc-env=PROC_MACRO_TEST_LOCATION=\"\"");
return;
}

@@ -121,6 +120,5 @@ fn main() {
// This file is under `target_dir` and is already under `OUT_DIR`.
let artifact_path = artifact_path.expect("no dylib for proc-macro-test-impl found");

let info_path = out_dir.join("proc_macro_test_location.txt");
fs::write(info_path, artifact_path.to_str().unwrap()).unwrap();
println!("cargo::rustc-env=PROC_MACRO_TEST_LOCATION={}", artifact_path.display());
}

@@ -1,6 +1,6 @@
//! Exports a few trivial procedural macros for testing.

#![warn(rust_2018_idioms, unused_lifetimes)]

#![feature(proc_macro_span, proc_macro_def_site)]
#![allow(clippy::all)]

@@ -1,6 +1,3 @@
//! Exports a few trivial procedural macros for testing.

#![warn(rust_2018_idioms, unused_lifetimes)]

pub static PROC_MACRO_TEST_LOCATION: &str =
include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt"));
pub static PROC_MACRO_TEST_LOCATION: &str = env!("PROC_MACRO_TEST_LOCATION");

@@ -2,13 +2,13 @@

mod version;

use proc_macro::bridge;
use std::{fmt, fs::File, io};

use libloading::Library;
use memmap2::Mmap;
use object::Object;
use paths::{AbsPath, Utf8Path, Utf8PathBuf};
use proc_macro::bridge;
use proc_macro_api::ProcMacroKind;

use crate::ProcMacroSrvSpan;

@@ -133,24 +133,33 @@ impl ProcMacroLibraryLibloading {
}
}

pub struct Expander {
pub(crate) struct Expander {
inner: ProcMacroLibraryLibloading,
path: Utf8PathBuf,
}

impl Drop for Expander {
fn drop(&mut self) {
#[cfg(windows)]
std::fs::remove_file(&self.path).ok();
_ = self.path;
}
}

impl Expander {
pub fn new(lib: &Utf8Path) -> Result<Expander, LoadProcMacroDylibError> {
pub(crate) fn new(lib: &Utf8Path) -> Result<Expander, LoadProcMacroDylibError> {
// Some libraries for dynamic loading require canonicalized path even when it is
// already absolute
let lib = lib.canonicalize_utf8()?;

let lib = ensure_file_with_lock_free_access(&lib)?;
let path = ensure_file_with_lock_free_access(&lib)?;

let library = ProcMacroLibraryLibloading::open(lib.as_ref())?;
let library = ProcMacroLibraryLibloading::open(path.as_ref())?;

Ok(Expander { inner: library })
Ok(Expander { inner: library, path })
}

pub fn expand<S: ProcMacroSrvSpan>(
pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
macro_body: tt::Subtree<S>,

@@ -169,7 +178,7 @@ impl Expander {
result.map_err(|e| e.into_string().unwrap_or_default())
}

pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
self.inner.proc_macros.list_macros()
}
}

@@ -198,7 +207,7 @@ fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result<Utf8PathBuf>
unique_name.push_str(file_name);

to.push(unique_name);
std::fs::copy(path, &to).unwrap();
std::fs::copy(path, &to)?;
Ok(to)
}

@@ -13,7 +13,6 @@
#![cfg(any(feature = "sysroot-abi", rust_analyzer))]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
#![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(unreachable_pub, internal_features)]

extern crate proc_macro;

@@ -27,13 +26,15 @@ extern crate rustc_lexer;

mod dylib;
mod proc_macros;
mod server;
mod server_impl;

use std::{
collections::{hash_map::Entry, HashMap},
env,
ffi::OsString,
fs, thread,
fs,
path::{Path, PathBuf},
thread,
time::SystemTime,
};

@@ -47,46 +48,25 @@ use proc_macro_api::{
};
use span::Span;

use crate::server::TokenStream;
use crate::server_impl::TokenStream;

// see `build.rs`
include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION");

trait ProcMacroSrvSpan: Copy {
type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
}

impl ProcMacroSrvSpan for TokenId {
type Server = server::token_id::TokenIdServer;

fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
Self::Server { interner: &server::SYMBOL_INTERNER, call_site, def_site, mixed_site }
}
}
impl ProcMacroSrvSpan for Span {
type Server = server::rust_analyzer_span::RaSpanServer;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
Self::Server {
interner: &server::SYMBOL_INTERNER,
call_site,
def_site,
mixed_site,
tracked_env_vars: Default::default(),
tracked_paths: Default::default(),
}
}
}

#[derive(Default)]
pub struct ProcMacroSrv {
pub struct ProcMacroSrv<'env> {
expanders: HashMap<(Utf8PathBuf, SystemTime), dylib::Expander>,
span_mode: SpanMode,
env: &'env EnvSnapshot,
}

impl<'env> ProcMacroSrv<'env> {
pub fn new(env: &'env EnvSnapshot) -> Self {
Self { expanders: Default::default(), span_mode: Default::default(), env }
}
}

const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;

impl ProcMacroSrv {
impl<'env> ProcMacroSrv<'env> {
pub fn set_span_mode(&mut self, span_mode: SpanMode) {
self.span_mode = span_mode;
}
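
`ProcMacroSrv` now borrows an environment snapshot taken once at startup instead of deriving `Default`, which is why the CLI and the tests both construct it through `ProcMacroSrv::new(&env)`. A short usage sketch based directly on the diff above:

```rust
use proc_macro_srv::{EnvSnapshot, ProcMacroSrv};

fn main() {
    // Taken once, before any macro gets a chance to mutate the process
    // environment; the snapshot must outlive the server.
    let env = EnvSnapshot::new();
    let mut srv = ProcMacroSrv::new(&env);
    // `srv` borrows `env` for its whole 'env lifetime, so the snapshot
    // cannot be dropped while the server is still handling requests.
    let _ = &mut srv;
}
```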

@@ -97,52 +77,24 @@ impl ProcMacroSrv {

pub fn expand(
&mut self,
task: msg::ExpandMacro,
msg::ExpandMacro { lib, env, current_dir, data }: msg::ExpandMacro,
) -> Result<(msg::FlatTree, Vec<u32>), msg::PanicMessage> {
let span_mode = self.span_mode;
let expander = self.expander(task.lib.as_ref()).map_err(|err| {
let snapped_env = self.env;
let expander = self.expander(lib.as_ref()).map_err(|err| {
debug_assert!(false, "should list macros before asking to expand");
msg::PanicMessage(format!("failed to load macro: {err}"))
})?;

let prev_env = EnvSnapshot::new();
for (k, v) in &task.env {
env::set_var(k, v);
}
let prev_working_dir = match &task.current_dir {
Some(dir) => {
let prev_working_dir = std::env::current_dir().ok();
if let Err(err) = std::env::set_current_dir(dir) {
eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}")
}
prev_working_dir
}
None => None,
};

let ExpnGlobals { def_site, call_site, mixed_site, .. } = task.has_global_spans;
let prev_env = EnvChange::apply(snapped_env, env, current_dir.as_ref().map(<_>::as_ref));

let result = match span_mode {
SpanMode::Id => {
expand_id(task, expander, def_site, call_site, mixed_site).map(|it| (it, vec![]))
}
SpanMode::RustAnalyzer => {
expand_ra_span(task, expander, def_site, call_site, mixed_site)
}
SpanMode::Id => expand_id(data, expander).map(|it| (it, vec![])),
SpanMode::RustAnalyzer => expand_ra_span(data, expander),
};

prev_env.rollback();

if let Some(dir) = prev_working_dir {
if let Err(err) = std::env::set_current_dir(&dir) {
eprintln!(
"Failed to set the current working dir to {}. Error: {:?}",
dir.display(),
err
)
}
}

result.map_err(msg::PanicMessage)
}

@@ -169,33 +121,55 @@ impl ProcMacroSrv {
}
}

trait ProcMacroSrvSpan: Copy {
type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
}

impl ProcMacroSrvSpan for TokenId {
type Server = server_impl::token_id::TokenIdServer;

fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
Self::Server { interner: &server_impl::SYMBOL_INTERNER, call_site, def_site, mixed_site }
}
}
impl ProcMacroSrvSpan for Span {
type Server = server_impl::rust_analyzer_span::RaSpanServer;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
Self::Server {
interner: &server_impl::SYMBOL_INTERNER,
call_site,
def_site,
mixed_site,
tracked_env_vars: Default::default(),
tracked_paths: Default::default(),
}
}
}

fn expand_id(
task: msg::ExpandMacro,
msg::ExpandMacroData {
macro_body,
macro_name,
attributes,
has_global_spans: ExpnGlobals { serialize: _, def_site, call_site, mixed_site },
span_data_table: _,
}: msg::ExpandMacroData,
expander: &dylib::Expander,
def_site: usize,
call_site: usize,
mixed_site: usize,
) -> Result<msg::FlatTree, String> {
let def_site = TokenId(def_site as u32);
let call_site = TokenId(call_site as u32);
let mixed_site = TokenId(mixed_site as u32);

let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION);
let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION));
let macro_body = macro_body.to_subtree_unresolved(CURRENT_API_VERSION);
let attributes = attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION));
let result = thread::scope(|s| {
let thread = thread::Builder::new()
.stack_size(EXPANDER_STACK_SIZE)
.name(task.macro_name.clone())
.name(macro_name.clone())
.spawn_scoped(s, || {
expander
.expand(
&task.macro_name,
macro_body,
attributes,
def_site,
call_site,
mixed_site,
)
.expand(&macro_name, macro_body, attributes, def_site, call_site, mixed_site)
.map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION))
});
let res = match thread {

@@ -212,35 +186,31 @@ fn expand_id(
}

fn expand_ra_span(
task: msg::ExpandMacro,
msg::ExpandMacroData {
macro_body,
macro_name,
attributes,
has_global_spans: ExpnGlobals { serialize: _, def_site, call_site, mixed_site },
span_data_table,
}: msg::ExpandMacroData,
expander: &dylib::Expander,
def_site: usize,
call_site: usize,
mixed_site: usize,
) -> Result<(msg::FlatTree, Vec<u32>), String> {
let mut span_data_table = deserialize_span_data_index_map(&task.span_data_table);
let mut span_data_table = deserialize_span_data_index_map(&span_data_table);

let def_site = span_data_table[def_site];
let call_site = span_data_table[call_site];
let mixed_site = span_data_table[mixed_site];

let macro_body = task.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table);
let macro_body = macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table);
let attributes =
task.attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
let result = thread::scope(|s| {
let thread = thread::Builder::new()
.stack_size(EXPANDER_STACK_SIZE)
.name(task.macro_name.clone())
.name(macro_name.clone())
.spawn_scoped(s, || {
expander
.expand(
&task.macro_name,
macro_body,
attributes,
def_site,
call_site,
mixed_site,
)
.expand(&macro_name, macro_body, attributes, def_site, call_site, mixed_site)
.map(|it| {
(
msg::FlatTree::new(&it, CURRENT_API_VERSION, &mut span_data_table),

@@ -271,31 +241,74 @@ impl PanicMessage {
}
}

struct EnvSnapshot {
pub struct EnvSnapshot {
vars: HashMap<OsString, OsString>,
}

impl EnvSnapshot {
fn new() -> EnvSnapshot {
pub fn new() -> EnvSnapshot {
EnvSnapshot { vars: env::vars_os().collect() }
}
}

struct EnvChange<'snap> {
changed_vars: Vec<String>,
prev_working_dir: Option<PathBuf>,
snap: &'snap EnvSnapshot,
}

impl<'snap> EnvChange<'snap> {
fn apply(
snap: &'snap EnvSnapshot,
new_vars: Vec<(String, String)>,
current_dir: Option<&Path>,
) -> EnvChange<'snap> {
let prev_working_dir = match current_dir {
Some(dir) => {
let prev_working_dir = std::env::current_dir().ok();
if let Err(err) = std::env::set_current_dir(dir) {
eprintln!(
"Failed to set the current working dir to {}. Error: {err:?}",
dir.display()
)
}
prev_working_dir
}
None => None,
};
EnvChange {
snap,
changed_vars: new_vars
.into_iter()
.map(|(k, v)| {
env::set_var(&k, v);
k
})
.collect(),
prev_working_dir,
}
}

fn rollback(self) {}
}

impl Drop for EnvSnapshot {
impl Drop for EnvChange<'_> {
fn drop(&mut self) {
for (name, value) in env::vars_os() {
let old_value = self.vars.remove(&name);
if old_value != Some(value) {
match old_value {
for name in self.changed_vars.drain(..) {
match self.snap.vars.get::<std::ffi::OsStr>(name.as_ref()) {
Some(prev_val) => env::set_var(name, prev_val),
None => env::remove_var(name),
Some(old_value) => env::set_var(name, old_value),
}
}

if let Some(dir) = &self.prev_working_dir {
if let Err(err) = std::env::set_current_dir(&dir) {
eprintln!(
"Failed to set the current working dir to {}. Error: {:?}",
dir.display(),
err
)
}
for (name, old_value) in self.vars.drain() {
env::set_var(name, old_value)
}
}
}
}
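
The new `EnvChange` guard records only the variables it actually set and restores them (plus the working directory) from the startup snapshot when dropped; `rollback()` is just an explicit, readable way to drop it. A condensed sketch of the guard idea using plain `std` types (names here are stand-ins, not the real ones; assumes `DEMO_VAR` is not already set in the host environment):

```rust
use std::collections::HashMap;
use std::env;
use std::ffi::{OsStr, OsString};

struct Snapshot {
    vars: HashMap<OsString, OsString>,
}

struct Change<'s> {
    changed: Vec<String>,
    snap: &'s Snapshot,
}

impl<'s> Change<'s> {
    fn apply(snap: &'s Snapshot, new_vars: Vec<(String, String)>) -> Self {
        let changed = new_vars
            .into_iter()
            .map(|(k, v)| {
                env::set_var(&k, v);
                k
            })
            .collect();
        Change { changed, snap }
    }
    // Dropping the guard is the rollback; naming it keeps call sites clear.
    fn rollback(self) {}
}

impl Drop for Change<'_> {
    fn drop(&mut self) {
        // Only the variables this guard touched are restored or removed.
        for name in self.changed.drain(..) {
            match self.snap.vars.get(OsStr::new(&name)) {
                Some(old) => env::set_var(&name, old),
                None => env::remove_var(&name),
            }
        }
    }
}

fn main() {
    let snap = Snapshot { vars: env::vars_os().collect() };
    let change = Change::apply(&snap, vec![("DEMO_VAR".into(), "1".into())]);
    assert_eq!(env::var("DEMO_VAR").as_deref(), Ok("1"));
    change.rollback(); // restores DEMO_VAR from the snapshot (here: removes it)
    assert!(env::var("DEMO_VAR").is_err());
}
```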

@@ -1,9 +1,10 @@
//! Proc macro ABI

use libloading::Library;
use proc_macro::bridge;
use proc_macro_api::ProcMacroKind;

use libloading::Library;

use crate::{dylib::LoadProcMacroDylibError, ProcMacroSrvSpan};

pub(crate) struct ProcMacros {

@@ -49,10 +50,11 @@ impl ProcMacros {
call_site: S,
mixed_site: S,
) -> Result<tt::Subtree<S>, crate::PanicMessage> {
let parsed_body = crate::server::TokenStream::with_subtree(macro_body);
let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body);

let parsed_attributes = attributes.map_or_else(crate::server::TokenStream::new, |attr| {
crate::server::TokenStream::with_subtree(attr)
let parsed_attributes = attributes
.map_or_else(crate::server_impl::TokenStream::new, |attr| {
crate::server_impl::TokenStream::with_subtree(attr)
});

for proc_macro in &self.exported_macros {

@@ -14,7 +14,7 @@ use proc_macro::bridge::{self, server};
use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER};
use tt::{TextRange, TextSize};

use crate::server::{
use crate::server_impl::{
delim_to_external, delim_to_internal, literal_with_stringify_parts,
token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER,
};

@@ -29,7 +29,7 @@ mod tt {
pub type Ident = ::tt::Ident<super::Span>;
}

type TokenStream = crate::server::TokenStream<Span>;
type TokenStream = crate::server_impl::TokenStream<Span>;

#[derive(Clone)]
pub struct SourceFile;

@@ -7,7 +7,7 @@ use std::{

use proc_macro::bridge::{self, server};

use crate::server::{
use crate::server_impl::{
delim_to_external, delim_to_internal, literal_with_stringify_parts,
token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER,
};

@@ -31,7 +31,7 @@ type Spacing = tt::Spacing;
#[allow(unused)]
type Literal = tt::Literal;
type Span = tt::TokenId;
type TokenStream = crate::server::TokenStream<Span>;
type TokenStream = crate::server_impl::TokenStream<Span>;

#[derive(Clone)]
pub struct SourceFile;

@@ -5,10 +5,10 @@ use proc_macro_api::msg::TokenId;
use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId};
use tt::TextRange;

use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv};

fn parse_string(call_site: TokenId, src: &str) -> crate::server::TokenStream<TokenId> {
crate::server::TokenStream::with_subtree(
fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> {
crate::server_impl::TokenStream::with_subtree(
mbe::parse_to_token_tree_static_span(call_site, src).unwrap(),
)
}

@@ -17,8 +17,8 @@ fn parse_string_spanned(
anchor: SpanAnchor,
call_site: SyntaxContextId,
src: &str,
) -> crate::server::TokenStream<Span> {
crate::server::TokenStream::with_subtree(
) -> crate::server_impl::TokenStream<Span> {
crate::server_impl::TokenStream::with_subtree(
mbe::parse_to_token_tree(anchor, call_site, src).unwrap(),
)
}

@@ -96,7 +96,8 @@ fn assert_expand_impl(

pub(crate) fn list() -> Vec<String> {
let dylib_path = proc_macro_test_dylib_path();
let mut srv = ProcMacroSrv::default();
let env = EnvSnapshot::new();
let mut srv = ProcMacroSrv::new(&env);
let res = srv.list_macros(&dylib_path).unwrap();
res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect()
}

@@ -1,7 +1,5 @@
//! A collection of tools for profiling rust-analyzer.

#![warn(rust_2018_idioms, unused_lifetimes)]

#[cfg(feature = "cpu_profiler")]
mod google_cpu_profiler;
mod memory_usage;

@@ -15,8 +15,6 @@
//! procedural macros).
//! * Lowering of concrete model to a [`base_db::CrateGraph`]

#![warn(rust_2018_idioms, unused_lifetimes)]

mod build_scripts;
mod cargo_workspace;
mod cfg;

@@ -2,7 +2,6 @@
//!
//! Based on cli flags, either spawns an LSP server, or runs a batch analysis

#![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(clippy::print_stdout, clippy::print_stderr)]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

@@ -9,8 +9,6 @@
//! The `cli` submodule implements some batch-processing analysis, primarily as
//! a debugging aid.

#![warn(rust_2018_idioms, unused_lifetimes)]

pub mod cli;

mod caps;

@@ -529,7 +529,7 @@ impl GlobalState {
None => ws.find_sysroot_proc_macro_srv()?,
};

let env = match &ws.kind {
let env: FxHashMap<_, _> = match &ws.kind {
ProjectWorkspaceKind::Cargo { cargo_config_extra_env, .. }
| ProjectWorkspaceKind::DetachedFile {
cargo: Some(_),

@@ -8,7 +8,6 @@
//! specific JSON shapes here -- there's little value in such tests, as we can't
//! be sure without a real client anyway.

#![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(clippy::disallowed_types)]

mod ratoml;

@@ -6,8 +6,6 @@
//!
//! This crate contains utilities to make this kind of source-gen easy.

#![warn(rust_2018_idioms, unused_lifetimes)]

use std::{
fmt, fs, mem,
path::{Path, PathBuf},

@@ -1,7 +1,5 @@
//! Missing batteries for standard libraries.

#![warn(rust_2018_idioms, unused_lifetimes)]

use std::io as sio;
use std::process::Command;
use std::{cmp::Ordering, ops, time::Instant};

@@ -20,7 +20,6 @@
//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>

#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![warn(rust_2018_idioms, unused_lifetimes)]

#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_lexer as rustc_lexer;

@@ -6,7 +6,6 @@
//! * Extracting markup (mainly, `$0` markers) out of fixture strings.
//! * marks (see the eponymous module).

#![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(clippy::print_stderr)]

mod assert_linear;

@@ -4,8 +4,6 @@
//! so `TextEdit` is the ultimate representation of the work done by
//! rust-analyzer.

#![warn(rust_2018_idioms, unused_lifetimes)]

use itertools::Itertools;
use std::cmp::max;
pub use text_size::{TextRange, TextSize};

@@ -1,7 +1,5 @@
//! Discovery of `cargo` & `rustc` executables.

#![warn(rust_2018_idioms, unused_lifetimes)]

use std::{env, iter, path::PathBuf};

use camino::{Utf8Path, Utf8PathBuf};

@@ -2,8 +2,6 @@
//! input and output) of macros. It closely mirrors `proc_macro` crate's
//! `TokenTree`.

#![warn(rust_2018_idioms, unused_lifetimes)]

pub mod buffer;
pub mod iter;

@@ -7,8 +7,6 @@
//! Hopefully, one day a reliable file watching/walking crate appears on
//! crates.io, and we can reduce this to trivial glue code.

#![warn(rust_2018_idioms, unused_lifetimes)]

use std::{
fs,
path::{Component, Path},

@@ -38,8 +38,6 @@
//! [`Handle`]: loader::Handle
//! [`Entries`]: loader::Entry

#![warn(rust_2018_idioms, unused_lifetimes)]

mod anchored_path;
pub mod file_set;
pub mod loader;