Auto merge of #16088 - Veykril:proc-macro-srv-2, r=Veykril
feat: Implement a rust-analyzer span backed proc-macro server mode

This implements the basic span APIs: basically everything that doesn't require talking back to the client for information access.

This also commits us to using an `ErasedFileAstId` of `!0 - 1`, aka `0xffff_fffe`, as our syntax fixup marker, instead of a dummy `FileId`, since we need the real file id for the `SourceFile` API to be implementable. The reason the server needs to know about this marker at all is to prevent it from creating invalid fixup spans, which could make r-a panic.
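For reference, a minimal standalone sketch of the marker value quoted above; it uses a bare `u32`, whereas the real constant (`FIXUP_ERASED_FILE_AST_ID_MARKER` in the `span` crate) has type `ErasedFileAstId`.

```rust
// Standalone illustration only: the real marker lives in the `span` crate as
// `FIXUP_ERASED_FILE_AST_ID_MARKER` and has type `ErasedFileAstId`.
const FIXUP_ERASED_FILE_AST_ID_MARKER: u32 = !0u32 - 1;

fn main() {
    // Bitwise NOT of 0 is u32::MAX, so !0 - 1 is 0xFFFF_FFFE, the value
    // quoted in the commit message.
    assert_eq!(FIXUP_ERASED_FILE_AST_ID_MARKER, 0xffff_fffe);
}
```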
bors committed Dec 22, 2023
2 parents 3ce3593 + 5761b50 commit 20e09c6
Showing 31 changed files with 1,479 additions and 654 deletions.
1 change: 0 additions & 1 deletion .github/workflows/ci.yaml
@@ -38,7 +38,6 @@ jobs:
- 'crates/proc-macro-api/**'
- 'crates/proc-macro-srv/**'
- 'crates/proc-macro-srv-cli/**'
- 'crates/proc-macro-test/**'
rust:
needs: changes
7 changes: 2 additions & 5 deletions Cargo.lock

(Diff not rendered: Cargo.lock is a generated file.)

3 changes: 1 addition & 2 deletions Cargo.toml
@@ -1,6 +1,6 @@
[workspace]
members = ["xtask/", "lib/*", "crates/*"]
exclude = ["crates/proc-macro-test/imp"]
exclude = ["crates/proc-macro-srv/proc-macro-test/"]
resolver = "2"

[workspace.package]
@@ -81,7 +81,6 @@ vfs = { path = "./crates/vfs", version = "0.0.0" }
rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" }

# local crates that aren't published to crates.io. These should not have versions.
proc-macro-test = { path = "./crates/proc-macro-test" }
sourcegen = { path = "./crates/sourcegen" }
test-fixture = { path = "./crates/test-fixture" }
test-utils = { path = "./crates/test-utils" }
13 changes: 13 additions & 0 deletions crates/hir-expand/src/builtin_fn_macro.rs
@@ -108,6 +108,7 @@ register_builtin! {
(format_args, FormatArgs) => format_args_expand,
(const_format_args, ConstFormatArgs) => format_args_expand,
(format_args_nl, FormatArgsNl) => format_args_nl_expand,
(quote, Quote) => quote_expand,

EAGER:
(compile_error, CompileError) => compile_error_expand,
@@ -770,3 +771,15 @@ fn option_env_expand(

ExpandResult::ok(expanded)
}

fn quote_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
_tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> {
ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
ExpandError::other("quote! is not implemented"),
)
}
44 changes: 21 additions & 23 deletions crates/hir-expand/src/fixup.rs
@@ -1,10 +1,9 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
use la_arena::RawIdx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SpanData};
use span::{ErasedFileAstId, Span, SpanAnchor, SpanData, FIXUP_ERASED_FILE_AST_ID_MARKER};
use stdx::never;
use syntax::{
ast::{self, AstNode, HasLoopBody},
@@ -39,13 +38,11 @@ impl SyntaxFixupUndoInfo {
pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
}

// censoring -> just don't convert the node
// replacement -> censor + append
// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
// to remove later
const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID);
const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0));
// We mark spans with `FIXUP_DUMMY_AST_ID` to indicate that they are fake.
const FIXUP_DUMMY_AST_ID: ErasedFileAstId = FIXUP_ERASED_FILE_AST_ID_MARKER;
const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
// If the fake span has this range end, that means that the range start is an index into the
// `original` list in `SyntaxFixupUndoInfo`.
const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);

pub(crate) fn fixup_syntax(
@@ -58,13 +55,13 @@ pub(crate) fn fixup_syntax(
let mut preorder = node.preorder();
let mut original = Vec::new();
let dummy_range = FIXUP_DUMMY_RANGE;
// we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
// the index into the replacement vec but only if the end points to !0
let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID };
let fake_span = |range| SpanData {
range: dummy_range,
anchor: dummy_anchor,
ctx: span_map.span_for_range(range).ctx,
let fake_span = |range| {
let span = span_map.span_for_range(range);
SpanData {
range: dummy_range,
anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
ctx: span.ctx,
}
};
while let Some(event) = preorder.next() {
let syntax::WalkEvent::Enter(node) = event else { continue };
@@ -76,12 +73,13 @@ pub(crate) fn fixup_syntax(
let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
let idx = original.len() as u32;
original.push(original_tree);
let span = span_map.span_for_range(node_range);
let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(),
span: SpanData {
range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
anchor: dummy_anchor,
ctx: span_map.span_for_range(node_range).ctx,
anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
ctx: span.ctx,
},
});
append.insert(node.clone().into(), vec![replacement]);
@@ -304,8 +302,8 @@ pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo)
let undo_info = &**undo_info;
#[allow(deprecated)]
if never!(
tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
|| tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
|| tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
) {
tt.delimiter.close = SpanData::DUMMY;
tt.delimiter.open = SpanData::DUMMY;
@@ -321,16 +319,16 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
.filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => {
let span = leaf.span();
let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE;
let is_real_leaf = span.anchor.ast_id != FIXUP_DUMMY_AST_ID;
let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
is_real_leaf || is_replaced_node
}
tt::TokenTree::Subtree(_) => true,
})
.flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => {
if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
|| tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
if tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
|| tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
{
// Even though fixup never creates subtrees with fixup spans, the old proc-macro server
// might copy them if the proc-macro asks for it, so we need to filter those out
@@ -341,7 +339,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
SmallVec::from_const([tt.into()])
}
tt::TokenTree::Leaf(leaf) => {
if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE {
if leaf.span().anchor.ast_id == FIXUP_DUMMY_AST_ID {
// we have a fake node here, we need to replace it again with the original
let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible {
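To illustrate the convention described by the fixup comments above, here is a minimal, self-contained sketch using simplified stand-in types (a flat struct with bare `u32` ast id and range fields instead of rust-analyzer's real `Span`, `SpanAnchor`, and `TextRange`). Note that only the ast id is overridden with the marker; the anchor's real `file_id` stays intact, which is what the commit message says the `SourceFile` API needs.

```rust
// Simplified stand-in for the fixup-span convention: a span is "fake" when its
// ast id equals the marker, and a fake span whose range end is !0 stores, in
// its range start, an index into the `original` list of `SyntaxFixupUndoInfo`.
const FIXUP_DUMMY_AST_ID: u32 = !0u32 - 1; // 0xFFFF_FFFE, the marker
const FIXUP_DUMMY_RANGE_END: u32 = !0u32;

#[derive(Clone, Copy)]
struct FakeSpan {
    ast_id: u32,
    range_start: u32,
    range_end: u32,
}

enum UndoAction {
    Keep,
    RestoreOriginal(u32),
    Drop,
}

// Encoding used for a replaced (censored-and-appended) node.
fn encode_replacement(original_idx: u32) -> FakeSpan {
    FakeSpan {
        ast_id: FIXUP_DUMMY_AST_ID,
        range_start: original_idx,
        range_end: FIXUP_DUMMY_RANGE_END,
    }
}

// What `reverse_fixups_` effectively does per leaf: keep real spans, restore
// replaced nodes from the undo info, and drop plain appended dummies.
fn undo(span: FakeSpan) -> UndoAction {
    if span.ast_id != FIXUP_DUMMY_AST_ID {
        UndoAction::Keep
    } else if span.range_end == FIXUP_DUMMY_RANGE_END {
        UndoAction::RestoreOriginal(span.range_start)
    } else {
        UndoAction::Drop
    }
}

fn main() {
    assert!(matches!(undo(encode_replacement(3)), UndoAction::RestoreOriginal(3)));
}
```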
1 change: 1 addition & 0 deletions crates/hir-expand/src/name.rs
@@ -388,6 +388,7 @@ pub mod known {
log_syntax,
module_path,
option_env,
quote,
std_panic,
stringify,
trace_macros,
68 changes: 58 additions & 10 deletions crates/hir/src/source_analyzer.rs
@@ -1162,9 +1162,40 @@ fn resolve_hir_path_qualifier(
resolver: &Resolver,
path: &Path,
) -> Option<PathResolution> {
resolver
.resolve_path_in_type_ns_fully(db.upcast(), &path)
.map(|ty| match ty {
(|| {
let (ty, unresolved) = match path.type_anchor() {
Some(type_ref) => {
let (_, res) =
TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner())
.lower_ty_ext(type_ref);
res.map(|ty_ns| (ty_ns, path.segments().first()))
}
None => {
let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
match remaining_idx {
Some(remaining_idx) => {
if remaining_idx + 1 == path.segments().len() {
Some((ty, path.segments().last()))
} else {
None
}
}
None => Some((ty, None)),
}
}
}?;

// If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type
// within the trait's associated types.
if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
if let Some(type_alias_id) =
db.trait_data(trait_id).associated_type_by_name(unresolved.name)
{
return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
}
}

let res = match ty {
TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
@@ -1175,11 +1206,28 @@
TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
})
.or_else(|| {
resolver
.resolve_module_path_in_items(db.upcast(), path.mod_path()?)
.take_types()
.map(|it| PathResolution::Def(it.into()))
})
};
match unresolved {
Some(unresolved) => resolver
.generic_def()
.and_then(|def| {
hir_ty::associated_type_shorthand_candidates(
db,
def,
res.in_type_ns()?,
|name, id| (name == unresolved.name).then_some(id),
)
})
.map(TypeAlias::from)
.map(Into::into)
.map(PathResolution::Def),
None => Some(res),
}
})()
.or_else(|| {
resolver
.resolve_module_path_in_items(db.upcast(), path.mod_path()?)
.take_types()
.map(|it| PathResolution::Def(it.into()))
})
}
21 changes: 17 additions & 4 deletions crates/proc-macro-api/src/lib.rs
@@ -20,7 +20,10 @@ use triomphe::Arc;
use serde::{Deserialize, Serialize};

use crate::{
msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS},
msg::{
deserialize_span_data_index_map, flat::serialize_span_data_index_map, ExpandMacro,
ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT,
},
process::ProcMacroProcessSrv,
};

@@ -166,6 +169,11 @@ impl ProcMacro {
call_site,
mixed_site,
},
span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT {
serialize_span_data_index_map(&span_data_table)
} else {
Vec::new()
},
};

let response = self
@@ -178,9 +186,14 @@
msg::Response::ExpandMacro(it) => {
Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table)))
}
msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => {
Err(ServerError { message: "unexpected response".to_string(), io: None })
}
msg::Response::ExpandMacroExtended(it) => Ok(it.map(|resp| {
FlatTree::to_subtree_resolved(
resp.tree,
version,
&deserialize_span_data_index_map(&resp.span_data_table),
)
})),
_ => Err(ServerError { message: "unexpected response".to_string(), io: None }),
}
}
}
(Remaining changed files not shown.)
