X-Git-Url: https://git.lizzy.rs/?a=blobdiff_plain;f=crates%2Fhir_expand%2Fsrc%2Flib.rs;h=27c3f097abb1bc9d0817f31a3c57eaa6962c638f;hb=1505b6a9b4965f1d0b352b2a390821aca2ed4743;hp=51899ca2f65b0ab612b3c32bdfec694dfe9afef4;hpb=fd3942eb620e37a4e4bfdd587d8a2893ccf6fea0;p=rust.git

diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs
index 51899ca2f65..27c3f097abb 100644
--- a/crates/hir_expand/src/lib.rs
+++ b/crates/hir_expand/src/lib.rs
@@ -15,16 +15,14 @@
 pub mod quote;
 pub mod eager;
 pub mod mod_path;
+mod fixup;
 
-use base_db::ProcMacroKind;
-use either::Either;
-
-pub use mbe::{ExpandError, ExpandResult, Origin};
-use mod_path::ModPath;
+pub use mbe::{Origin, ValueResult};
 
-use std::{hash::Hash, iter, sync::Arc};
+use std::{fmt, hash::Hash, iter, sync::Arc};
 
-use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange};
+use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange, ProcMacroKind};
+use either::Either;
 use syntax::{
     algo::{self, skip_trivia_token},
     ast::{self, AstNode, HasDocComments},
@@ -37,9 +35,35 @@
     builtin_derive_macro::BuiltinDeriveExpander,
     builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
     db::TokenExpander,
+    mod_path::ModPath,
     proc_macro::ProcMacroExpander,
 };
 
+pub type ExpandResult<T> = ValueResult<T, ExpandError>;
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ExpandError {
+    UnresolvedProcMacro,
+    Mbe(mbe::ExpandError),
+    Other(Box<str>),
+}
+
+impl From<mbe::ExpandError> for ExpandError {
+    fn from(mbe: mbe::ExpandError) -> Self {
+        Self::Mbe(mbe)
+    }
+}
+
+impl fmt::Display for ExpandError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            ExpandError::UnresolvedProcMacro => f.write_str("unresolved proc-macro"),
+            ExpandError::Mbe(it) => it.fmt(f),
+            ExpandError::Other(it) => f.write_str(it),
+        }
+    }
+}
+
 /// Input to the analyzer is a set of files, where each file is identified by
 /// `FileId` and contains source code. However, another source of source code in
 /// Rust are macros: each macro can be thought of as producing a "temporary
@@ -61,11 +85,13 @@ enum HirFileIdRepr {
     FileId(FileId),
     MacroFile(MacroFile),
 }
+
 impl From<FileId> for HirFileId {
     fn from(id: FileId) -> Self {
         HirFileId(HirFileIdRepr::FileId(id))
     }
 }
+
 impl From<MacroFile> for HirFileId {
     fn from(id: MacroFile) -> Self {
         HirFileId(HirFileIdRepr::MacroFile(id))
@@ -124,17 +150,17 @@ pub enum MacroCallKind {
     },
     Derive {
         ast_id: AstId<ast::Adt>,
-        derive_name: Box<str>,
         /// Syntactical index of the invoking `#[derive]` attribute.
         ///
         /// Outer attributes are counted first, then inner attributes. This does not support
         /// out-of-line modules, which may have attributes spread across 2 files!
         derive_attr_index: u32,
+        /// Index of the derive macro in the derive attribute
+        derive_index: u32,
     },
     Attr {
         ast_id: AstId<ast::Item>,
-        attr_name: Box<str>,
-        attr_args: (tt::Subtree, mbe::TokenMap),
+        attr_args: Arc<(tt::Subtree, mbe::TokenMap)>,
         /// Syntactical index of the invoking `#[attribute]`.
         ///
         /// Outer attributes are counted first, then inner attributes. This does not support
@@ -151,8 +177,8 @@ pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId {
             HirFileIdRepr::FileId(file_id) => file_id,
             HirFileIdRepr::MacroFile(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-                let file_id = match &loc.eager {
-                    Some(EagerCallInfo { included_file: Some(file), .. }) => (*file).into(),
+                let file_id = match loc.eager {
+                    Some(EagerCallInfo { included_file: Some(file), .. }) => file.into(),
                     _ => loc.kind.file_id(),
                 };
                 file_id.original_file(db)
@@ -249,10 +275,7 @@ pub fn is_custom_derive(&self, db: &dyn db::AstDatabase) -> bool {
             HirFileIdRepr::FileId(_) => false,
             HirFileIdRepr::MacroFile(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-                match loc.def.kind {
-                    MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) => true,
-                    _ => false,
-                }
+                matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _))
             }
         }
     }
@@ -302,15 +325,15 @@ pub fn as_lazy_macro(
     }
 
     pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
-        let id = match &self.kind {
-            MacroDefKind::ProcMacro(.., id) => return Either::Right(*id),
+        let id = match self.kind {
+            MacroDefKind::ProcMacro(.., id) => return Either::Right(id),
             MacroDefKind::Declarative(id)
             | MacroDefKind::BuiltIn(_, id)
             | MacroDefKind::BuiltInAttr(_, id)
             | MacroDefKind::BuiltInDerive(_, id)
             | MacroDefKind::BuiltInEager(_, id) => id,
         };
-        Either::Left(*id)
+        Either::Left(id)
     }
 
     pub fn is_proc_macro(&self) -> bool {
@@ -359,20 +382,15 @@ pub fn to_node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> {
     /// get only the specific derive that is being referred to.
     pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange {
         let mut kind = self;
-        loop {
+        let file_id = loop {
             match kind.file_id().0 {
                 HirFileIdRepr::MacroFile(file) => {
                     kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
                 }
-                _ => break,
+                HirFileIdRepr::FileId(file_id) => break file_id,
             }
-        }
-
-        // `call_id` is now the outermost macro call, so its location is in a real file.
-        let file_id = match kind.file_id().0 {
-            HirFileIdRepr::FileId(it) => it,
-            HirFileIdRepr::MacroFile(_) => unreachable!("encountered unexpected macro file"),
         };
+
         let range = match kind {
             MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
             MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
@@ -434,7 +452,7 @@ pub struct ExpansionInfo {
     attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
 
     macro_def: Arc<TokenExpander>,
-    macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+    macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
     /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
     /// and as such we need to shift tokens if they are part of an attributes input instead of their item.
     macro_arg_shift: mbe::Shift,
@@ -479,7 +497,7 @@ pub fn map_token_down(
 
             let token_range = token.value.text_range();
             match &loc.kind {
-                MacroCallKind::Attr { attr_args: (_, map), invoc_attr_index, .. } => {
+                MacroCallKind::Attr { attr_args, invoc_attr_index, .. } => {
                     let attr = item
                         .doc_comments_and_attrs()
                         .nth(*invoc_attr_index as usize)
@@ -493,8 +511,9 @@ pub fn map_token_down(
                             let relative_range =
                                 token.value.text_range().checked_sub(attr_input_start)?;
                             // shift by the item's tree's max id
-                            let token_id =
-                                self.macro_arg_shift.shift(map.token_by_range(relative_range)?);
+                            let token_id = self
+                                .macro_arg_shift
+                                .shift(attr_args.1.token_by_range(relative_range)?);
                             Some(token_id)
                         }
                         _ => None,
@@ -542,13 +561,13 @@ pub fn map_token_up(
 
         // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item.
         let (token_map, tt) = match &loc.kind {
-            MacroCallKind::Attr { attr_args: (_, arg_token_map), .. } => {
+            MacroCallKind::Attr { attr_args, .. } => {
                 // try unshifting the the token id, if unshifting fails, the token resides in the non-item attribute input
                 // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
                 match self.macro_arg_shift.unshift(token_id) {
                     Some(unshifted) => {
                         token_id = unshifted;
-                        (arg_token_map, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+                        (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
                     }
                     None => (&self.macro_arg.1, self.arg.clone()),
                 }
@@ -574,7 +593,6 @@ pub fn map_token_up(
 /// `AstId` points to an AST node in any file.
 ///
 /// It is stable across reparses, and can be used as salsa key/value.
-// FIXME: isn't this just a `Source<FileAstId<N>>` ?
 pub type AstId<N> = InFile<FileAstId<N>>;
 
 impl<N: AstNode> AstId<N> {
@@ -602,7 +620,6 @@ pub fn new(file_id: HirFileId, value: T) -> InFile<T> {
         InFile { file_id, value }
     }
 
-    // Similarly, naming here is stupid...
     pub fn with_value<U>(&self, value: U) -> InFile<U> {
         InFile::new(self.file_id, value)
     }
@@ -705,24 +722,14 @@ fn ascend_node_border_tokens(
 ) -> Option<InFile<(SyntaxToken, SyntaxToken)>> {
     let expansion = file_id.expansion_info(db)?;
 
-    // the input node has only one token ?
-    let first = skip_trivia_token(node.first_token()?, Direction::Next)?;
-    let last = skip_trivia_token(node.last_token()?, Direction::Prev)?;
-    let is_single_token = first == last;
-
-    node.descendants().find_map(|it| {
-        let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
-        let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?;
-
-        let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;
-        let last = ascend_call_token(db, &expansion, InFile::new(file_id, last))?;
-
-        if (!is_single_token && first == last) || (first.file_id != last.file_id) {
-            return None;
-        }
+    let first_token = |node: &SyntaxNode| skip_trivia_token(node.first_token()?, Direction::Next);
+    let last_token = |node: &SyntaxNode| skip_trivia_token(node.last_token()?, Direction::Prev);
 
-        Some(InFile::new(first.file_id, (first.value, last.value)))
-    })
+    let first = first_token(node)?;
+    let last = last_token(node)?;
+    let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?;
+    let last = ascend_call_token(db, &expansion, InFile::new(file_id, last))?;
+    (first.file_id == last.file_id).then(|| InFile::new(first.file_id, (first.value, last.value)))
 }
 
 fn ascend_call_token(
@@ -730,14 +737,14 @@ fn ascend_call_token(
     db: &dyn db::AstDatabase,
     expansion: &ExpansionInfo,
     token: InFile<SyntaxToken>,
 ) -> Option<InFile<SyntaxToken>> {
-    let (mapped, origin) = expansion.map_token_up(db, token.as_ref())?;
-    if origin != Origin::Call {
-        return None;
-    }
-    if let Some(info) = mapped.file_id.expansion_info(db) {
-        return ascend_call_token(db, &info, mapped);
+    let mut mapping = expansion.map_token_up(db, token.as_ref())?;
+    while let (mapped, Origin::Call) = mapping {
+        match mapped.file_id.expansion_info(db) {
+            Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?,
+            None => return Some(mapped),
+        }
     }
-    Some(mapped)
+    None
 }
 
 impl<N: AstNode> InFile<N> {
@@ -758,20 +765,28 @@ pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
     }
 
     pub fn original_ast_node(self, db: &dyn db::AstDatabase) -> Option<InFile<N>> {
-        match ascend_node_border_tokens(db, self.syntax()) {
-            Some(InFile { file_id, value: (first, last) }) => {
-                let original_file = file_id.original_file(db);
-                if file_id != original_file.into() {
-                    let range = first.text_range().cover(last.text_range());
-                    tracing::error!("Failed mapping up more for {:?}", range);
-                    return None;
-                }
-                let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
-                Some(InFile::new(file_id, anc.ancestors().find_map(N::cast)?))
+        // This kind of upmapping can only be achieved in attribute expanded files,
+        // as we don't have node inputs otherwise and therefor can't find an `N` node in the input
+        if !self.file_id.is_macro() {
+            return Some(self);
+        } else if !self.file_id.is_attr_macro(db) {
+            return None;
+        }
+
+        if let Some(InFile { file_id, value: (first, last) }) =
+            ascend_node_border_tokens(db, self.syntax())
+        {
+            if file_id.is_macro() {
+                let range = first.text_range().cover(last.text_range());
+                tracing::error!("Failed mapping out of macro file for {:?}", range);
+                return None;
             }
-            _ if !self.file_id.is_macro() => Some(self),
-            _ => None,
+            // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
+            let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
+            let value = anc.ancestors().find_map(N::cast)?;
+            return Some(InFile::new(file_id, value));
         }
+        None
     }
 
     pub fn syntax(&self) -> InFile<&SyntaxNode> {
@@ -821,10 +836,10 @@ pub fn from_call_site(call: &ast::MacroCall) -> ExpandTo {
             MACRO_TYPE => ExpandTo::Type,
 
             ARG_LIST | TRY_EXPR | TUPLE_EXPR | PAREN_EXPR | ARRAY_EXPR | FOR_EXPR | PATH_EXPR
-            | CLOSURE_EXPR | CONDITION | BREAK_EXPR | RETURN_EXPR | MATCH_EXPR | MATCH_ARM
-            | MATCH_GUARD | RECORD_EXPR_FIELD | CALL_EXPR | INDEX_EXPR | METHOD_CALL_EXPR
-            | FIELD_EXPR | AWAIT_EXPR | CAST_EXPR | REF_EXPR | PREFIX_EXPR | RANGE_EXPR
-            | BIN_EXPR => ExpandTo::Expr,
+            | CLOSURE_EXPR | BREAK_EXPR | RETURN_EXPR | MATCH_EXPR | MATCH_ARM | MATCH_GUARD
+            | RECORD_EXPR_FIELD | CALL_EXPR | INDEX_EXPR | METHOD_CALL_EXPR | FIELD_EXPR
+            | AWAIT_EXPR | CAST_EXPR | REF_EXPR | PREFIX_EXPR | RANGE_EXPR | BIN_EXPR
+            | LET_EXPR => ExpandTo::Expr,
             LET_STMT => {
                 // FIXME: Handle LHS Pattern
                 ExpandTo::Expr
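Illustrative sketch (editor's addition, not part of the patch): the new `ExpandError` enum and the `ExpandResult<T>` alias over `mbe::ValueResult` let macro expansion report an error while still handing back a best-effort value. The snippet below mimics that shape with local stand-ins; `ValueResult`, `expand`, and the error variants here are simplified placeholders, assumed for illustration, rather than the real `mbe`/`hir_expand` items.

use std::fmt;

// Stand-in for `mbe::ValueResult`: expansion always yields some value,
// but may record an error next to it instead of failing outright.
struct ValueResult<T, E> {
    value: T,
    err: Option<E>,
}

impl<T, E> ValueResult<T, E> {
    fn ok(value: T) -> Self {
        Self { value, err: None }
    }
    fn with_err(value: T, err: E) -> Self {
        Self { value, err: Some(err) }
    }
}

// Simplified stand-in for the crate-level error introduced by the patch.
#[derive(Debug)]
enum ExpandError {
    UnresolvedProcMacro,
    Other(Box<str>),
}

impl fmt::Display for ExpandError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ExpandError::UnresolvedProcMacro => f.write_str("unresolved proc-macro"),
            ExpandError::Other(it) => f.write_str(it),
        }
    }
}

type ExpandResult<T> = ValueResult<T, ExpandError>;

// Hypothetical expansion step: on failure it still returns a usable
// (empty) value so downstream consumers can keep working.
fn expand(input: &str) -> ExpandResult<String> {
    if input.contains("proc_macro") {
        ExpandResult::with_err(String::new(), ExpandError::UnresolvedProcMacro)
    } else {
        ExpandResult::ok(format!("expanded({input})"))
    }
}

fn main() {
    let res = expand("foo!()");
    if let Some(err) = &res.err {
        eprintln!("expansion error: {err}");
    }
    println!("{}", res.value);
}

If the real `ValueResult` follows this layout, callers can surface `err` as a diagnostic while still feeding `value` into later analysis, which is the recovery behaviour the patch is moving toward.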