"libc",
]
+[[package]]
+name = "hir_expand"
+version = "0.0.0"
+dependencies = [
+ "arena",
+ "base_db",
+ "either",
+ "log",
+ "mbe",
+ "parser",
+ "profile",
+ "rustc-hash",
+ "syntax",
+ "test_utils",
+ "tt",
+]
+
[[package]]
name = "home"
version = "0.5.3"
"arrayvec",
"base_db",
"either",
+ "hir_expand",
"itertools",
"log",
"profile",
"ra_hir_def",
- "ra_hir_expand",
"ra_hir_ty",
"rustc-hash",
"stdx",
"either",
"expect",
"fst",
+ "hir_expand",
"indexmap",
"itertools",
"log",
"mbe",
"once_cell",
"profile",
- "ra_hir_expand",
"rustc-hash",
"smallvec",
"stdx",
"tt",
]
-[[package]]
-name = "ra_hir_expand"
-version = "0.1.0"
-dependencies = [
- "arena",
- "base_db",
- "either",
- "log",
- "mbe",
- "parser",
- "profile",
- "rustc-hash",
- "syntax",
- "test_utils",
- "tt",
-]
-
[[package]]
name = "ra_hir_ty"
version = "0.1.0"
"chalk-solve",
"ena",
"expect",
+ "hir_expand",
"itertools",
"log",
"profile",
"ra_hir_def",
- "ra_hir_expand",
"rustc-hash",
"scoped-tls",
"smallvec",
--- /dev/null
+[package]
+name = "hir_expand"
+version = "0.0.0"
+license = "MIT OR Apache-2.0"
+authors = ["rust-analyzer developers"]
+edition = "2018"
+
+[lib]
+doctest = false
+
+[dependencies]
+log = "0.4.8"
+either = "1.5.3"
+rustc-hash = "1.0.0"
+
+arena = { path = "../arena" }
+base_db = { path = "../base_db" }
+syntax = { path = "../syntax" }
+parser = { path = "../parser" }
+profile = { path = "../profile" }
+tt = { path = "../tt" }
+mbe = { path = "../mbe" }
+test_utils = { path = "../test_utils"}
--- /dev/null
+//! `AstIdMap` allows to create stable IDs for "large" syntax nodes like items
+//! and macro calls.
+//!
+//! Specifically, it enumerates all items in a file and uses the position of an
+//! item as an ID. That way, IDs don't change unless the set of items itself
+//! changes.
+
+use std::{
+ any::type_name,
+ fmt,
+ hash::{Hash, Hasher},
+ marker::PhantomData,
+};
+
+use arena::{Arena, Idx};
+use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
+
+/// `AstId` points to an AST node in a specific file.
+pub struct FileAstId<N: AstNode> {
+    raw: ErasedFileAstId,
+    // `fn() -> N` ties the id to a node type without owning an `N`;
+    // the id itself is just an index.
+    _ty: PhantomData<fn() -> N>,
+}
+
+// Clone/Copy (and the traits below) are implemented manually because a
+// `#[derive]` would add an `N: Clone`/`N: Copy` bound, which is not needed:
+// only the `raw` index is actually copied.
+impl<N: AstNode> Clone for FileAstId<N> {
+    fn clone(&self) -> FileAstId<N> {
+        *self
+    }
+}
+impl<N: AstNode> Copy for FileAstId<N> {}
+
+// Manual impls (rather than derives) to avoid placing spurious bounds on `N`;
+// equality/hashing/debug all operate on the erased index only.
+impl<N: AstNode> PartialEq for FileAstId<N> {
+    fn eq(&self, other: &Self) -> bool {
+        self.raw == other.raw
+    }
+}
+impl<N: AstNode> Eq for FileAstId<N> {}
+impl<N: AstNode> Hash for FileAstId<N> {
+    fn hash<H: Hasher>(&self, hasher: &mut H) {
+        self.raw.hash(hasher);
+    }
+}
+
+impl<N: AstNode> fmt::Debug for FileAstId<N> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
+    }
+}
+
+impl<N: AstNode> FileAstId<N> {
+    /// Re-types the id to a supertype of `N` (e.g. a concrete item kind to
+    /// `ast::Item`). The underlying erased index is unchanged.
+    // Can't make this a From implementation because of coherence
+    pub fn upcast<M: AstNode>(self) -> FileAstId<M>
+    where
+        N: Into<M>,
+    {
+        FileAstId { raw: self.raw, _ty: PhantomData }
+    }
+}
+
+// The type-erased form of a `FileAstId`: an index into the arena below.
+type ErasedFileAstId = Idx<SyntaxNodePtr>;
+
+/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
+#[derive(Debug, PartialEq, Eq, Default)]
+pub struct AstIdMap {
+    arena: Arena<SyntaxNodePtr>,
+}
+
+impl AstIdMap {
+    pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
+        // Only whole source files (root nodes) may be indexed.
+        assert!(node.parent().is_none());
+        let mut res = AstIdMap { arena: Arena::default() };
+        // By walking the tree in breadth-first order we make sure that parents
+        // get lower ids than children. That is, adding a new child does not
+        // change parent's id. This means that, say, adding a new function to a
+        // trait does not change ids of top-level items, which helps caching.
+        bfs(node, |it| {
+            if let Some(module_item) = ast::Item::cast(it) {
+                res.alloc(module_item.syntax());
+            }
+        });
+        res
+    }
+
+    /// Returns the stable id of `item`.
+    ///
+    /// Panics if `item` was not indexed by this map (i.e. it is not an
+    /// `ast::Item` of the file the map was built from).
+    pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
+        let raw = self.erased_ast_id(item.syntax());
+        FileAstId { raw, _ty: PhantomData }
+    }
+    // NOTE: a linear scan over every indexed item; acceptable for current
+    // callers, but worth remembering for hot paths.
+    fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
+        let ptr = SyntaxNodePtr::new(item);
+        match self.arena.iter().find(|(_id, i)| **i == ptr) {
+            Some((it, _)) => it,
+            None => panic!(
+                "Can't find {:?} in AstIdMap:\n{:?}",
+                item,
+                self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
+            ),
+        }
+    }
+
+    /// Resolves an id back into a pointer to the node in the syntax tree.
+    /// Panics if `N` does not match the kind of node the id was created from.
+    pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
+        self.arena[id.raw].clone().cast::<N>().unwrap()
+    }
+
+    fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
+        self.arena.alloc(SyntaxNodePtr::new(item))
+    }
+}
+
+/// Walks the subtree in bfs order, calling `f` for each node.
+fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) {
+    use std::collections::VecDeque;
+    // Classic queue-based breadth-first traversal: pop a node off the front,
+    // enqueue its children at the back, then visit the node. Visit order is
+    // identical to a layer-by-layer walk.
+    let mut queue: VecDeque<SyntaxNode> = VecDeque::new();
+    queue.push_back(node.clone());
+    while let Some(current) = queue.pop_front() {
+        queue.extend(current.children());
+        f(current);
+    }
+}
--- /dev/null
+//! Builtin derives.
+
+use log::debug;
+
+use parser::FragmentKind;
+use syntax::{
+ ast::{self, AstNode, GenericParamsOwner, ModuleItemOwner, NameOwner},
+ match_ast,
+};
+
+use crate::{db::AstDatabase, name, quote, LazyMacroId, MacroDefId, MacroDefKind};
+
+// Generates, from the `Trait => expander_fn` table below:
+//  * the `BuiltinDeriveExpander` enum (one variant per builtin derive),
+//  * its `expand` method, dispatching to the matching free function, and
+//  * `find_builtin_derive`, mapping a derive name to a `MacroDefId`.
+macro_rules! register_builtin {
+    ( $($trait:ident => $expand:ident),* ) => {
+        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+        pub enum BuiltinDeriveExpander {
+            $($trait),*
+        }
+
+        impl BuiltinDeriveExpander {
+            pub fn expand(
+                &self,
+                db: &dyn AstDatabase,
+                id: LazyMacroId,
+                tt: &tt::Subtree,
+            ) -> Result<tt::Subtree, mbe::ExpandError> {
+                let expander = match *self {
+                    $( BuiltinDeriveExpander::$trait => $expand, )*
+                };
+                expander(db, id, tt)
+            }
+        }
+
+        pub fn find_builtin_derive(ident: &name::Name) -> Option<MacroDefId> {
+            let kind = match ident {
+                $( id if id == &name::name![$trait] => BuiltinDeriveExpander::$trait, )*
+                _ => return None,
+            };
+
+            // Builtin derives have no source definition: no crate, no ast_id.
+            Some(MacroDefId { krate: None, ast_id: None, kind: MacroDefKind::BuiltInDerive(kind), local_inner: false })
+        }
+    };
+}
+
+// The derives rust-analyzer knows how to expand itself.
+register_builtin! {
+    Copy => copy_expand,
+    Clone => clone_expand,
+    Default => default_expand,
+    Debug => debug_expand,
+    Hash => hash_expand,
+    Ord => ord_expand,
+    PartialOrd => partial_ord_expand,
+    Eq => eq_expand,
+    PartialEq => partial_eq_expand
+}
+
+/// The bits of an ADT definition a simple derive expansion needs: its name
+/// (keeping the original token id for hygiene mapping) and the number of type
+/// parameters. Lifetimes are deliberately ignored (see `parse_adt`).
+struct BasicAdtInfo {
+    name: tt::Ident,
+    type_params: usize,
+}
+
+/// Re-parses the derive input token tree as items and extracts the first
+/// item's name and its number of type parameters (only type params; lifetimes
+/// are not counted). The item must be a struct, enum, or union.
+fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
+    let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, FragmentKind::Items)?; // FragmentKind::Items doesn't parse attrs?
+    let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
+        debug!("derive node didn't parse");
+        mbe::ExpandError::UnexpectedToken
+    })?;
+    let item = macro_items.items().next().ok_or_else(|| {
+        debug!("no module item parsed");
+        mbe::ExpandError::NoMatchingRule
+    })?;
+    let node = item.syntax();
+    let (name, params) = match_ast! {
+        match node {
+            ast::Struct(it) => (it.name(), it.generic_param_list()),
+            ast::Enum(it) => (it.name(), it.generic_param_list()),
+            ast::Union(it) => (it.name(), it.generic_param_list()),
+            _ => {
+                debug!("unexpected node is {:?}", node);
+                return Err(mbe::ExpandError::ConversionError)
+            },
+        }
+    };
+    let name = name.ok_or_else(|| {
+        debug!("parsed item has no name");
+        mbe::ExpandError::NoMatchingRule
+    })?;
+    // Recover the original token id for the name, so the expansion can be
+    // mapped back to the source.
+    let name_token_id = token_map.token_by_range(name.syntax().text_range()).ok_or_else(|| {
+        debug!("name token not found");
+        mbe::ExpandError::ConversionError
+    })?;
+    let name_token = tt::Ident { id: name_token_id, text: name.text().clone() };
+    let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
+    Ok(BasicAdtInfo { name: name_token, type_params })
+}
+
+/// Builds the token list `< T0 BOUND , T1 BOUND , ... >` with `n` synthetic
+/// type parameters, each followed by the (possibly empty) `bound` tokens.
+fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
+    // Small constructors keep the repetitive leaf-building noise in one place.
+    let punct = |c: char| -> tt::TokenTree {
+        tt::Leaf::Punct(tt::Punct {
+            char: c,
+            spacing: tt::Spacing::Alone,
+            id: tt::TokenId::unspecified(),
+        })
+        .into()
+    };
+    let ident = |text: String| -> tt::TokenTree {
+        tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: text.into() }).into()
+    };
+
+    let mut result = vec![punct('<')];
+    for i in 0..n {
+        if i > 0 {
+            result.push(punct(','));
+        }
+        result.push(ident(format!("T{}", i)));
+        result.extend(bound.iter().cloned());
+    }
+    result.push(punct('>'));
+    result
+}
+
+/// Expands a marker-style derive to
+/// `impl<T0: Trait, ...> Trait for Name<T0, ...> {}` — every type parameter
+/// is bounded by the derived trait itself, and the impl body is empty.
+fn expand_simple_derive(
+    tt: &tt::Subtree,
+    trait_path: tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let info = parse_adt(tt)?;
+    let name = info.name;
+    let trait_path_clone = trait_path.token_trees.clone();
+    let bound = (quote! { : ##trait_path_clone }).token_trees;
+    // `type_params` carries the `: Trait` bounds; `type_args` is bare.
+    let type_params = make_type_args(info.type_params, bound);
+    let type_args = make_type_args(info.type_params, Vec::new());
+    let trait_path = trait_path.token_trees;
+    let expanded = quote! {
+        impl ##type_params ##trait_path for #name ##type_args {}
+    };
+    Ok(expanded)
+}
+
+/// Returns the token used to refer to the `core` crate from inside the
+/// expansion: `core` when the defining crate depends on it, `crate` when we
+/// appear to be expanding inside `core` itself.
+fn find_builtin_crate(db: &dyn AstDatabase, id: LazyMacroId) -> tt::TokenTree {
+    // FIXME: make hygiene works for builtin derive macro
+    // such that $crate can be used here.
+    let cg = db.crate_graph();
+    let krate = db.lookup_intern_macro(id).krate;
+
+    // XXX
+    // All crates except core itself should have a dependency on core,
+    // We detect `core` by seeing whether it doesn't have such a dependency.
+    let tt = if cg[krate].dependencies.iter().any(|dep| &*dep.name == "core") {
+        quote! { core }
+    } else {
+        quote! { crate }
+    };
+
+    tt.token_trees[0].clone()
+}
+
+// The concrete derive expanders. Each resolves the builtin crate token
+// (`core` or `crate`) and delegates to `expand_simple_derive` with the path
+// of the trait to implement.
+fn copy_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let krate = find_builtin_crate(db, id);
+    expand_simple_derive(tt, quote! { #krate::marker::Copy })
+}
+
+fn clone_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let krate = find_builtin_crate(db, id);
+    expand_simple_derive(tt, quote! { #krate::clone::Clone })
+}
+
+fn default_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let krate = find_builtin_crate(db, id);
+    expand_simple_derive(tt, quote! { #krate::default::Default })
+}
+
+fn debug_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let krate = find_builtin_crate(db, id);
+    expand_simple_derive(tt, quote! { #krate::fmt::Debug })
+}
+
+fn hash_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let krate = find_builtin_crate(db, id);
+    expand_simple_derive(tt, quote! { #krate::hash::Hash })
+}
+
+fn eq_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let krate = find_builtin_crate(db, id);
+    expand_simple_derive(tt, quote! { #krate::cmp::Eq })
+}
+
+fn partial_eq_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let krate = find_builtin_crate(db, id);
+    expand_simple_derive(tt, quote! { #krate::cmp::PartialEq })
+}
+
+fn ord_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let krate = find_builtin_crate(db, id);
+    expand_simple_derive(tt, quote! { #krate::cmp::Ord })
+}
+
+fn partial_ord_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let krate = find_builtin_crate(db, id);
+    expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd })
+}
+
+#[cfg(test)]
+mod tests {
+    use base_db::{fixture::WithFixture, CrateId, SourceDatabase};
+    use name::{known, Name};
+
+    use crate::{test_db::TestDB, AstId, MacroCallId, MacroCallKind, MacroCallLoc};
+
+    use super::*;
+
+    /// Expands the named builtin derive against the first item of `s` and
+    /// returns the expansion's text. The fixture provides an (empty) `core`
+    /// crate so `find_builtin_crate` resolves to `core`.
+    fn expand_builtin_derive(s: &str, name: Name) -> String {
+        let def = find_builtin_derive(&name).unwrap();
+        let fixture = format!(
+            r#"//- /main.rs crate:main deps:core
+<|>
+{}
+//- /lib.rs crate:core
+// empty
+"#,
+            s
+        );
+
+        let (db, file_pos) = TestDB::with_position(&fixture);
+        let file_id = file_pos.file_id;
+        let parsed = db.parse(file_id);
+        let items: Vec<_> =
+            parsed.syntax_node().descendants().filter_map(ast::Item::cast).collect();
+
+        let ast_id_map = db.ast_id_map(file_id.into());
+
+        let attr_id = AstId::new(file_id.into(), ast_id_map.ast_id(&items[0]));
+
+        let loc = MacroCallLoc {
+            def,
+            krate: CrateId(0),
+            kind: MacroCallKind::Attr(attr_id, name.to_string()),
+        };
+
+        let id: MacroCallId = db.intern_macro(loc).into();
+        let parsed = db.parse_or_expand(id.as_file()).unwrap();
+
+        // FIXME text() for syntax nodes parsed from token tree looks weird
+        // because there's no whitespace, see below
+        parsed.text().to_string()
+    }
+
+    #[test]
+    fn test_copy_expand_simple() {
+        let expanded = expand_builtin_derive(
+            r#"
+        #[derive(Copy)]
+        struct Foo;
+"#,
+            known::Copy,
+        );
+
+        assert_eq!(expanded, "impl< >core::marker::CopyforFoo< >{}");
+    }
+
+    #[test]
+    fn test_copy_expand_with_type_params() {
+        let expanded = expand_builtin_derive(
+            r#"
+        #[derive(Copy)]
+        struct Foo<A, B>;
+"#,
+            known::Copy,
+        );
+
+        assert_eq!(
+            expanded,
+            "impl<T0:core::marker::Copy,T1:core::marker::Copy>core::marker::CopyforFoo<T0,T1>{}"
+        );
+    }
+
+    #[test]
+    fn test_copy_expand_with_lifetimes() {
+        let expanded = expand_builtin_derive(
+            r#"
+        #[derive(Copy)]
+        struct Foo<A, B, 'a, 'b>;
+"#,
+            known::Copy,
+        );
+
+        // We currently just ignore lifetimes
+
+        assert_eq!(
+            expanded,
+            "impl<T0:core::marker::Copy,T1:core::marker::Copy>core::marker::CopyforFoo<T0,T1>{}"
+        );
+    }
+
+    #[test]
+    fn test_clone_expand() {
+        let expanded = expand_builtin_derive(
+            r#"
+        #[derive(Clone)]
+        struct Foo<A, B>;
+"#,
+            known::Clone,
+        );
+
+        assert_eq!(
+            expanded,
+            "impl<T0:core::clone::Clone,T1:core::clone::Clone>core::clone::CloneforFoo<T0,T1>{}"
+        );
+    }
+}
--- /dev/null
+//! Builtin macro
+use crate::{
+ db::AstDatabase, name, quote, AstId, CrateId, EagerMacroId, LazyMacroId, MacroCallId,
+ MacroDefId, MacroDefKind, TextSize,
+};
+
+use base_db::FileId;
+use either::Either;
+use mbe::parse_to_token_tree;
+use parser::FragmentKind;
+use syntax::ast::{self, AstToken, HasStringValue};
+
+// Generates, from the two tables below:
+//  * the `BuiltinFnLikeExpander` and `EagerExpander` enums,
+//  * their `expand` methods, dispatching to the matching free function, and
+//  * `find_by_name`, mapping a macro name to one of the two expander kinds.
+macro_rules! register_builtin {
+    ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => {
+        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+        pub enum BuiltinFnLikeExpander {
+            $($kind),*
+        }
+
+        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+        pub enum EagerExpander {
+            $($e_kind),*
+        }
+
+        impl BuiltinFnLikeExpander {
+            pub fn expand(
+                &self,
+                db: &dyn AstDatabase,
+                id: LazyMacroId,
+                tt: &tt::Subtree,
+            ) -> Result<tt::Subtree, mbe::ExpandError> {
+                let expander = match *self {
+                    $( BuiltinFnLikeExpander::$kind => $expand, )*
+                };
+                expander(db, id, tt)
+            }
+        }
+
+        impl EagerExpander {
+            // Eager expanders additionally report the `FragmentKind` their
+            // output should be parsed as.
+            pub fn expand(
+                &self,
+                db: &dyn AstDatabase,
+                arg_id: EagerMacroId,
+                tt: &tt::Subtree,
+            ) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
+                let expander = match *self {
+                    $( EagerExpander::$e_kind => $e_expand, )*
+                };
+                expander(db,arg_id,tt)
+            }
+        }
+
+        fn find_by_name(ident: &name::Name) -> Option<Either<BuiltinFnLikeExpander, EagerExpander>> {
+            match ident {
+                $( id if id == &name::name![$name] => Some(Either::Left(BuiltinFnLikeExpander::$kind)), )*
+                $( id if id == &name::name![$e_name] => Some(Either::Right(EagerExpander::$e_kind)), )*
+                _ => return None,
+            }
+        }
+    };
+}
+
+pub fn find_builtin_macro(
+    ident: &name::Name,
+    krate: CrateId,
+    ast_id: AstId<ast::MacroCall>,
+) -> Option<MacroDefId> {
+    // Both expander families produce the same `MacroDefId` shell; only the
+    // `kind` field differs, so compute it first and share the construction.
+    let kind = match find_by_name(ident)? {
+        Either::Left(expander) => MacroDefKind::BuiltIn(expander),
+        Either::Right(expander) => MacroDefKind::BuiltInEager(expander),
+    };
+    Some(MacroDefId { krate: Some(krate), ast_id: Some(ast_id), kind, local_inner: false })
+}
+
+// Table of builtin macros. LAZY ones expand like ordinary macros; EAGER ones
+// (`concat!`, `env!`, ...) have their arguments expanded eagerly — see
+// `EagerExpander` above.
+register_builtin! {
+    LAZY:
+    (column, Column) => column_expand,
+    (compile_error, CompileError) => compile_error_expand,
+    (file, File) => file_expand,
+    (line, Line) => line_expand,
+    (assert, Assert) => assert_expand,
+    (stringify, Stringify) => stringify_expand,
+    (format_args, FormatArgs) => format_args_expand,
+    // format_args_nl only differs in that it adds a newline in the end,
+    // so we use the same stub expansion for now
+    (format_args_nl, FormatArgsNl) => format_args_expand,
+
+    EAGER:
+    (concat, Concat) => concat_expand,
+    (include, Include) => include_expand,
+    (include_bytes, IncludeBytes) => include_bytes_expand,
+    (include_str, IncludeStr) => include_str_expand,
+    (env, Env) => env_expand,
+    (option_env, OptionEnv) => option_env_expand
+}
+
+/// `line!()` — always expands to `0`; rust-analyzer only needs the result to
+/// type-check as a number, not to be accurate.
+fn line_expand(
+    _db: &dyn AstDatabase,
+    _id: LazyMacroId,
+    _tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    // dummy implementation for type-checking purposes
+    let line_num = 0;
+    let expanded = quote! {
+        #line_num
+    };
+
+    Ok(expanded)
+}
+
+/// `stringify!(...)` — expands to the argument's source text as a string
+/// literal.
+fn stringify_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    _tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    let loc = db.lookup_intern_macro(id);
+
+    let macro_content = {
+        let arg = loc.kind.arg(db).ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
+        let macro_args = arg;
+        let text = macro_args.text();
+        // Slice off the first and last character, assumed to be the `(`/`)`
+        // delimiters of the call.
+        // NOTE(review): this looks incorrect for `stringify! {..}` /
+        // `stringify![..]` invocations — confirm whether only parens reach here.
+        let without_parens = TextSize::of('(')..text.len() - TextSize::of(')');
+        text.slice(without_parens).to_string()
+    };
+
+    let expanded = quote! {
+        #macro_content
+    };
+
+    Ok(expanded)
+}
+
+/// `column!()` — always expands to `0`; like `line!`, only the type matters.
+fn column_expand(
+    _db: &dyn AstDatabase,
+    _id: LazyMacroId,
+    _tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    // dummy implementation for type-checking purposes
+    let col_num = 0;
+    let expanded = quote! {
+        #col_num
+    };
+
+    Ok(expanded)
+}
+
+fn assert_expand(
+    _db: &dyn AstDatabase,
+    _id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    // A hacky implementation for goto def and hover
+    // We expand `assert!(cond, arg1, arg2)` to
+    // ```
+    // { { (&(cond), &(arg1), &(arg2),); } }
+    // ```,
+    // which is wrong but useful.
+
+    // Split the argument token stream on top-level commas.
+    let mut args = Vec::new();
+    let mut current = Vec::new();
+    for tt in tt.token_trees.iter().cloned() {
+        match tt {
+            tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => {
+                args.push(current);
+                current = Vec::new();
+            }
+            _ => {
+                current.push(tt);
+            }
+        }
+    }
+    if !current.is_empty() {
+        args.push(current);
+    }
+
+    // Wrap every argument (including the condition) in `&(...)` so arbitrary
+    // expressions are referenced, not moved.
+    let arg_tts = args.into_iter().flat_map(|arg| {
+        quote! { &(##arg), }
+    }.token_trees).collect::<Vec<_>>();
+
+    let expanded = quote! {
+        { { (##arg_tts); } }
+    };
+    Ok(expanded)
+}
+
+/// `file!()` — always expands to the empty string literal.
+fn file_expand(
+    _db: &dyn AstDatabase,
+    _id: LazyMacroId,
+    _tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    // FIXME: RA purposefully lacks knowledge of absolute file names
+    // so just return "".
+    let file_name = "";
+
+    let expanded = quote! {
+        #file_name
+    };
+
+    Ok(expanded)
+}
+
+/// `compile_error!("msg")` — expands to the diverging `loop { "msg" }` so the
+/// call site still type-checks; actual error reporting is not implemented.
+fn compile_error_expand(
+    _db: &dyn AstDatabase,
+    _id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    // Accept only a single literal argument.
+    if tt.count() == 1 {
+        if let tt::TokenTree::Leaf(tt::Leaf::Literal(it)) = &tt.token_trees[0] {
+            let s = it.text.as_str();
+            // Crude "is this a string literal?" check: the text contains a quote.
+            if s.contains('"') {
+                return Ok(quote! { loop { #it }});
+            }
+        };
+    }
+
+    Err(mbe::ExpandError::BindingError("Must be a string".into()))
+}
+
+fn format_args_expand(
+    _db: &dyn AstDatabase,
+    _id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+    // We expand `format_args!("", a1, a2)` to
+    // ```
+    // std::fmt::Arguments::new_v1(&[], &[
+    //     std::fmt::ArgumentV1::new(&arg1,std::fmt::Display::fmt),
+    //     std::fmt::ArgumentV1::new(&arg2,std::fmt::Display::fmt),
+    // ])
+    // ```,
+    // which is still not really correct, but close enough for now
+    // Split the argument token stream on top-level commas.
+    let mut args = Vec::new();
+    let mut current = Vec::new();
+    for tt in tt.token_trees.iter().cloned() {
+        match tt {
+            tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => {
+                args.push(current);
+                current = Vec::new();
+            }
+            _ => {
+                current.push(tt);
+            }
+        }
+    }
+    if !current.is_empty() {
+        args.push(current);
+    }
+    if args.is_empty() {
+        return Err(mbe::ExpandError::NoMatchingRule);
+    }
+    // The format string itself is dropped — only the arguments matter for
+    // type inference.
+    let _format_string = args.remove(0);
+    let arg_tts = args.into_iter().flat_map(|arg| {
+        quote! { std::fmt::ArgumentV1::new(&(##arg), std::fmt::Display::fmt), }
+    }.token_trees).collect::<Vec<_>>();
+    let expanded = quote! {
+        std::fmt::Arguments::new_v1(&[], &[##arg_tts])
+    };
+    Ok(expanded)
+}
+
+/// Re-lexes `lit` as a string literal and returns its unescaped contents,
+/// or `None` if it is not a valid string literal.
+fn unquote_str(lit: &tt::Literal) -> Option<String> {
+    let token = ast::String::cast(ast::make::tokens::literal(&lit.to_string()))?;
+    let value = token.value()?;
+    Some(value.into_owned())
+}
+
+/// `concat!("a", "b", ...)` — eagerly concatenates its string-literal
+/// arguments. The token stream must alternate literals (even positions) and
+/// commas (odd positions); anything else is an error.
+fn concat_expand(
+    _db: &dyn AstDatabase,
+    _arg_id: EagerMacroId,
+    tt: &tt::Subtree,
+) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
+    let mut text = String::new();
+    for (i, t) in tt.token_trees.iter().enumerate() {
+        match t {
+            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => {
+                text += &unquote_str(&it).ok_or_else(|| mbe::ExpandError::ConversionError)?;
+            }
+            tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
+            _ => return Err(mbe::ExpandError::UnexpectedToken),
+        }
+    }
+
+    Ok((quote!(#text), FragmentKind::Expr))
+}
+
+/// Resolves `path` relative to the file containing the macro call, returning
+/// the target `FileId` if it exists.
+fn relative_file(
+    db: &dyn AstDatabase,
+    call_id: MacroCallId,
+    path: &str,
+    allow_recursion: bool,
+) -> Option<FileId> {
+    let call_site = call_id.as_file().original_file(db);
+    let resolved = db.resolve_path(call_site, path)?;
+    // A file including itself would expand forever; reject the self-reference
+    // unless the caller explicitly allows recursion.
+    if resolved == call_site && !allow_recursion {
+        return None;
+    }
+    Some(resolved)
+}
+
+/// Extracts the unescaped value of the first token of `tt`, which must be a
+/// string literal; everything after the first token is ignored.
+fn parse_string(tt: &tt::Subtree) -> Result<String, mbe::ExpandError> {
+    tt.token_trees
+        .get(0)
+        .and_then(|tt| match tt {
+            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it),
+            _ => None,
+        })
+        .ok_or_else(|| mbe::ExpandError::ConversionError)
+}
+
+/// `include!("path")` — reads the referenced file and re-lexes its text as a
+/// token tree. Self-inclusion is rejected (see `relative_file`).
+fn include_expand(
+    db: &dyn AstDatabase,
+    arg_id: EagerMacroId,
+    tt: &tt::Subtree,
+) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
+    let path = parse_string(tt)?;
+    let file_id = relative_file(db, arg_id.into(), &path, false)
+        .ok_or_else(|| mbe::ExpandError::ConversionError)?;
+
+    // FIXME:
+    // Handle include as expression
+    let res = parse_to_token_tree(&db.file_text(file_id))
+        .ok_or_else(|| mbe::ExpandError::ConversionError)?
+        .0;
+
+    Ok((res, FragmentKind::Items))
+}
+
+/// `include_bytes!("path")` — always expands to the empty byte-string `b""`;
+/// the path is validated but the file contents are never read.
+fn include_bytes_expand(
+    _db: &dyn AstDatabase,
+    _arg_id: EagerMacroId,
+    tt: &tt::Subtree,
+) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
+    let _path = parse_string(tt)?;
+
+    // FIXME: actually read the file here if the user asked for macro expansion
+    let res = tt::Subtree {
+        delimiter: None,
+        token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+            text: r#"b"""#.into(),
+            id: tt::TokenId::unspecified(),
+        }))],
+    };
+    Ok((res, FragmentKind::Expr))
+}
+
+/// `include_str!("path")` — expands to the referenced file's text, or to `""`
+/// when the file cannot be resolved.
+fn include_str_expand(
+    db: &dyn AstDatabase,
+    arg_id: EagerMacroId,
+    tt: &tt::Subtree,
+) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
+    let path = parse_string(tt)?;
+
+    // FIXME: we're not able to read excluded files (which is most of them because
+    // it's unusual to `include_str!` a Rust file), but we can return an empty string.
+    // Ideally, we'd be able to offer a precise expansion if the user asks for macro
+    // expansion.
+    let file_id = match relative_file(db, arg_id.into(), &path, true) {
+        Some(file_id) => file_id,
+        None => {
+            return Ok((quote!(""), FragmentKind::Expr));
+        }
+    };
+
+    let text = db.file_text(file_id);
+    let text = &*text;
+
+    Ok((quote!(#text), FragmentKind::Expr))
+}
+
+/// Looks up `key` in the build environment of the crate containing the
+/// eager macro call.
+fn get_env_inner(db: &dyn AstDatabase, arg_id: EagerMacroId, key: &str) -> Option<String> {
+    let krate = db.lookup_intern_eager_expansion(arg_id).krate;
+    db.crate_graph()[krate].env.get(key)
+}
+
+/// `env!("KEY")` — expands to the variable's value, or to a recognizable
+/// dummy string when it is not set.
+fn env_expand(
+    db: &dyn AstDatabase,
+    arg_id: EagerMacroId,
+    tt: &tt::Subtree,
+) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
+    let key = parse_string(tt)?;
+
+    // FIXME:
+    // If the environment variable is not defined, rustc emits a compilation error.
+    // We might do the same once the other builtins are fully supported.
+    // But for now, we should return some dummy string for better type inference.
+    // However, we cannot use an empty string here, because
+    // `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` would become
+    // `include!("foo.rs")`, which might go into an infinite loop
+    let s = get_env_inner(db, arg_id, &key).unwrap_or_else(|| "__RA_UNIMPLEMENTED__".to_string());
+    let expanded = quote! { #s };
+
+    Ok((expanded, FragmentKind::Expr))
+}
+
+/// `option_env!("KEY")` — expands to `Option::Some(value)` when the variable
+/// is set in the crate's build environment, `Option::None::<&str>` otherwise.
+fn option_env_expand(
+    db: &dyn AstDatabase,
+    arg_id: EagerMacroId,
+    tt: &tt::Subtree,
+) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
+    let key = parse_string(tt)?;
+    // Fix: the `Some` arm previously emitted `std::option::Some(#s)`, but the
+    // `Some` variant is not re-exported at the `std::option` module level, so
+    // the generated path could never resolve. Spell out the full variant path,
+    // mirroring the `None` arm.
+    let expanded = match get_env_inner(db, arg_id, &key) {
+        None => quote! { std::option::Option::None::<&str> },
+        Some(s) => quote! { std::option::Option::Some(#s) },
+    };
+
+    Ok((expanded, FragmentKind::Expr))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::{
+        name::AsName, test_db::TestDB, AstNode, EagerCallLoc, MacroCallId, MacroCallKind,
+        MacroCallLoc,
+    };
+    use base_db::{fixture::WithFixture, SourceDatabase};
+    use std::sync::Arc;
+    use syntax::ast::NameOwner;
+
+    /// Looks up the expander for the first macro definition in the fixture,
+    /// expands the second macro call with it (going through either the lazy
+    /// or the eager path), and returns the expansion's text.
+    fn expand_builtin_macro(ra_fixture: &str) -> String {
+        let (db, file_id) = TestDB::with_single_file(&ra_fixture);
+        let parsed = db.parse(file_id);
+        let macro_calls: Vec<_> =
+            parsed.syntax_node().descendants().filter_map(ast::MacroCall::cast).collect();
+
+        let ast_id_map = db.ast_id_map(file_id.into());
+
+        let expander = find_by_name(&macro_calls[0].name().unwrap().as_name()).unwrap();
+
+        let krate = CrateId(0);
+        let file_id = match expander {
+            Either::Left(expander) => {
+                // the first one should be a macro_rules
+                let def = MacroDefId {
+                    krate: Some(CrateId(0)),
+                    ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[0]))),
+                    kind: MacroDefKind::BuiltIn(expander),
+                    local_inner: false,
+                };
+
+                let loc = MacroCallLoc {
+                    def,
+                    krate,
+                    kind: MacroCallKind::FnLike(AstId::new(
+                        file_id.into(),
+                        ast_id_map.ast_id(&macro_calls[1]),
+                    )),
+                };
+
+                let id: MacroCallId = db.intern_macro(loc).into();
+                id.as_file()
+            }
+            Either::Right(expander) => {
+                // the first one should be a macro_rules
+                let def = MacroDefId {
+                    krate: Some(krate),
+                    ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[0]))),
+                    kind: MacroDefKind::BuiltInEager(expander),
+                    local_inner: false,
+                };
+
+                let args = macro_calls[1].token_tree().unwrap();
+                let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0;
+
+                // Eager macros are expanded in two steps: intern the raw
+                // argument, expand it, then intern the expansion itself.
+                let arg_id = db.intern_eager_expansion({
+                    EagerCallLoc {
+                        def,
+                        fragment: FragmentKind::Expr,
+                        subtree: Arc::new(parsed_args.clone()),
+                        krate,
+                        file_id: file_id.into(),
+                    }
+                });
+
+                let (subtree, fragment) = expander.expand(&db, arg_id, &parsed_args).unwrap();
+                let eager = EagerCallLoc {
+                    def,
+                    fragment,
+                    subtree: Arc::new(subtree),
+                    krate,
+                    file_id: file_id.into(),
+                };
+
+                let id: MacroCallId = db.intern_eager_expansion(eager).into();
+                id.as_file()
+            }
+        };
+
+        db.parse_or_expand(file_id).unwrap().to_string()
+    }
+
+    #[test]
+    fn test_column_expand() {
+        let expanded = expand_builtin_macro(
+            r#"
+        #[rustc_builtin_macro]
+        macro_rules! column {() => {}}
+        column!()
+        "#,
+        );
+
+        assert_eq!(expanded, "0");
+    }
+
+    #[test]
+    fn test_line_expand() {
+        let expanded = expand_builtin_macro(
+            r#"
+        #[rustc_builtin_macro]
+        macro_rules! line {() => {}}
+        line!()
+        "#,
+        );
+
+        assert_eq!(expanded, "0");
+    }
+
+    #[test]
+    fn test_stringify_expand() {
+        let expanded = expand_builtin_macro(
+            r#"
+        #[rustc_builtin_macro]
+        macro_rules! stringify {() => {}}
+        stringify!(a b c)
+        "#,
+        );
+
+        assert_eq!(expanded, "\"a b c\"");
+    }
+
+    #[test]
+    fn test_env_expand() {
+        let expanded = expand_builtin_macro(
+            r#"
+        #[rustc_builtin_macro]
+        macro_rules! env {() => {}}
+        env!("TEST_ENV_VAR")
+        "#,
+        );
+
+        assert_eq!(expanded, "\"__RA_UNIMPLEMENTED__\"");
+    }
+
+    #[test]
+    fn test_option_env_expand() {
+        let expanded = expand_builtin_macro(
+            r#"
+        #[rustc_builtin_macro]
+        macro_rules! option_env {() => {}}
+        option_env!("TEST_ENV_VAR")
+        "#,
+        );
+
+        assert_eq!(expanded, "std::option::Option::None:: < &str>");
+    }
+
+    #[test]
+    fn test_file_expand() {
+        let expanded = expand_builtin_macro(
+            r#"
+        #[rustc_builtin_macro]
+        macro_rules! file {() => {}}
+        file!()
+        "#,
+        );
+
+        assert_eq!(expanded, "\"\"");
+    }
+
+    #[test]
+    fn test_assert_expand() {
+        let expanded = expand_builtin_macro(
+            r#"
+        #[rustc_builtin_macro]
+        macro_rules! assert {
+            ($cond:expr) => ({ /* compiler built-in */ });
+            ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+        }
+        assert!(true, "{} {:?}", arg1(a, b, c), arg2);
+        "#,
+        );
+
+        assert_eq!(expanded, "{{(&(true), &(\"{} {:?}\"), &(arg1(a,b,c)), &(arg2),);}}");
+    }
+
+    #[test]
+    fn test_compile_error_expand() {
+        let expanded = expand_builtin_macro(
+            r#"
+        #[rustc_builtin_macro]
+        macro_rules! compile_error {
+            ($msg:expr) => ({ /* compiler built-in */ });
+            ($msg:expr,) => ({ /* compiler built-in */ })
+        }
+        compile_error!("error!");
+        "#,
+        );
+
+        assert_eq!(expanded, r#"loop{"error!"}"#);
+    }
+
+    #[test]
+    fn test_format_args_expand() {
+        let expanded = expand_builtin_macro(
+            r#"
+        #[rustc_builtin_macro]
+        macro_rules! format_args {
+            ($fmt:expr) => ({ /* compiler built-in */ });
+            ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+        }
+        format_args!("{} {:?}", arg1(a, b, c), arg2);
+        "#,
+        );
+
+        assert_eq!(
+            expanded,
+            r#"std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(arg1(a,b,c)),std::fmt::Display::fmt),std::fmt::ArgumentV1::new(&(arg2),std::fmt::Display::fmt),])"#
+        );
+    }
+
+    #[test]
+    fn test_include_bytes_expand() {
+        let expanded = expand_builtin_macro(
+            r#"
+        #[rustc_builtin_macro]
+        macro_rules! include_bytes {
+            ($file:expr) => {{ /* compiler built-in */ }};
+            ($file:expr,) => {{ /* compiler built-in */ }};
+        }
+        include_bytes("foo");
+        "#,
+        );
+
+        assert_eq!(expanded, r#"b"""#);
+    }
+}
--- /dev/null
+//! Defines database & queries for macro expansion.
+
+use std::sync::Arc;
+
+use base_db::{salsa, SourceDatabase};
+use mbe::{ExpandResult, MacroRules};
+use parser::FragmentKind;
+use syntax::{algo::diff, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode};
+
+use crate::{
+ ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId,
+ HirFileId, HirFileIdRepr, LazyMacroId, MacroCallId, MacroCallLoc, MacroDefId, MacroDefKind,
+ MacroFile, ProcMacroExpander,
+};
+
+/// A macro-expansion backend: a user-written `macro_rules!` definition, one
+/// of the builtin (fn-like or derive) expanders, or an external proc macro.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum TokenExpander {
+    MacroRules(mbe::MacroRules),
+    Builtin(BuiltinFnLikeExpander),
+    BuiltinDerive(BuiltinDeriveExpander),
+    ProcMacro(ProcMacroExpander),
+}
+
+impl TokenExpander {
+    pub fn expand(
+        &self,
+        db: &dyn AstDatabase,
+        id: LazyMacroId,
+        tt: &tt::Subtree,
+    ) -> mbe::ExpandResult<tt::Subtree> {
+        match self {
+            TokenExpander::MacroRules(it) => it.expand(tt),
+            // FIXME switch these to ExpandResult as well
+            TokenExpander::Builtin(it) => it.expand(db, id, tt).into(),
+            TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(),
+            TokenExpander::ProcMacro(_) => {
+                // We store the result in salsa db to prevent non-determinisc behavior in
+                // some proc-macro implementation
+                // See #4315 for details
+                db.expand_proc_macro(id.into()).into()
+            }
+        }
+    }
+
+    // Only `macro_rules!` tracks per-token ids; the other expanders pass
+    // token ids through unchanged.
+    pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
+        match self {
+            TokenExpander::MacroRules(it) => it.map_id_down(id),
+            TokenExpander::Builtin(..) => id,
+            TokenExpander::BuiltinDerive(..) => id,
+            TokenExpander::ProcMacro(..) => id,
+        }
+    }
+
+    // Inverse of `map_id_down`; non-`macro_rules!` expanders attribute every
+    // token to the call site.
+    pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
+        match self {
+            TokenExpander::MacroRules(it) => it.map_id_up(id),
+            TokenExpander::Builtin(..) => (id, mbe::Origin::Call),
+            TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call),
+            TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
+        }
+    }
+}
+
+// FIXME: rename to ExpandDatabase
+#[salsa::query_group(AstDatabaseStorage)]
+pub trait AstDatabase: SourceDatabase {
+ fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
+
+ #[salsa::transparent]
+ fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
+
+ #[salsa::interned]
+ fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId;
+ fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
+ #[salsa::transparent]
+ fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
+ fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>;
+ fn parse_macro(&self, macro_file: MacroFile)
+ -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
+ fn macro_expand(&self, macro_call: MacroCallId) -> (Option<Arc<tt::Subtree>>, Option<String>);
+
+ #[salsa::interned]
+ fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId;
+
+ fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>;
+}
+
+/// This expands the given macro call, but with different arguments. This is
+/// used for completion, where we want to see what 'would happen' if we insert a
+/// token. The `token_to_map` is mapped down into the expansion, and the mapped
+/// token is returned.
+pub fn expand_hypothetical(
+ db: &dyn AstDatabase,
+ actual_macro_call: MacroCallId,
+ hypothetical_args: &syntax::ast::TokenTree,
+ token_to_map: syntax::SyntaxToken,
+) -> Option<(SyntaxNode, syntax::SyntaxToken)> {
+ let macro_file = MacroFile { macro_call_id: actual_macro_call };
+ let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap();
+ let range =
+ token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
+ let token_id = tmap_1.token_by_range(range)?;
+ let macro_def = expander(db, actual_macro_call)?;
+ let (node, tmap_2) =
+ parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1))))?;
+ let token_id = macro_def.0.map_id_down(token_id);
+ let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
+ let token = syntax::algo::find_covering_element(&node.syntax_node(), range).into_token()?;
+ Some((node.syntax_node(), token))
+}
+
+pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
+ let map =
+ db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it));
+ Arc::new(map)
+}
+
+pub(crate) fn macro_def(
+ db: &dyn AstDatabase,
+ id: MacroDefId,
+) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
+ match id.kind {
+ MacroDefKind::Declarative => {
+ let macro_call = id.ast_id?.to_node(db);
+ let arg = macro_call.token_tree()?;
+ let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
+ log::warn!("fail on macro_def to token tree: {:#?}", arg);
+ None
+ })?;
+ let rules = match MacroRules::parse(&tt) {
+ Ok(it) => it,
+ Err(err) => {
+ log::warn!("fail on macro_def parse: error: {:#?} {:#?}", err, tt);
+ return None;
+ }
+ };
+ Some(Arc::new((TokenExpander::MacroRules(rules), tmap)))
+ }
+ MacroDefKind::BuiltIn(expander) => {
+ Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default())))
+ }
+ MacroDefKind::BuiltInDerive(expander) => {
+ Some(Arc::new((TokenExpander::BuiltinDerive(expander), mbe::TokenMap::default())))
+ }
+ MacroDefKind::BuiltInEager(_) => None,
+ MacroDefKind::CustomDerive(expander) => {
+ Some(Arc::new((TokenExpander::ProcMacro(expander), mbe::TokenMap::default())))
+ }
+ }
+}
+
+pub(crate) fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
+ let id = match id {
+ MacroCallId::LazyMacro(id) => id,
+ MacroCallId::EagerMacro(_id) => {
+ // FIXME: support macro_arg for eager macro
+ return None;
+ }
+ };
+ let loc = db.lookup_intern_macro(id);
+ let arg = loc.kind.arg(db)?;
+ Some(arg.green().clone())
+}
+
+pub(crate) fn macro_arg(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
+ let arg = db.macro_arg_text(id)?;
+ let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?;
+ Some(Arc::new((tt, tmap)))
+}
+
+pub(crate) fn macro_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+) -> (Option<Arc<tt::Subtree>>, Option<String>) {
+ macro_expand_with_arg(db, id, None)
+}
+
+fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
+ let lazy_id = match id {
+ MacroCallId::LazyMacro(id) => id,
+ MacroCallId::EagerMacro(_id) => {
+ return None;
+ }
+ };
+
+ let loc = db.lookup_intern_macro(lazy_id);
+ let macro_rules = db.macro_def(loc.def)?;
+ Some(macro_rules)
+}
+
+fn macro_expand_with_arg(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
+) -> (Option<Arc<tt::Subtree>>, Option<String>) {
+ let lazy_id = match id {
+ MacroCallId::LazyMacro(id) => id,
+ MacroCallId::EagerMacro(id) => {
+ if arg.is_some() {
+ return (
+ None,
+ Some("hypothetical macro expansion not implemented for eager macro".to_owned()),
+ );
+ } else {
+ return (Some(db.lookup_intern_eager_expansion(id).subtree), None);
+ }
+ }
+ };
+
+ let loc = db.lookup_intern_macro(lazy_id);
+ let macro_arg = match arg.or_else(|| db.macro_arg(id)) {
+ Some(it) => it,
+ None => return (None, Some("Fail to args in to tt::TokenTree".into())),
+ };
+
+ let macro_rules = match db.macro_def(loc.def) {
+ Some(it) => it,
+ None => return (None, Some("Fail to find macro definition".into())),
+ };
+ let ExpandResult(tt, err) = macro_rules.0.expand(db, lazy_id, ¯o_arg.0);
+ // Set a hard limit for the expanded tt
+ let count = tt.count();
+ if count > 65536 {
+ return (None, Some(format!("Total tokens count exceed limit : count = {}", count)));
+ }
+ (Some(Arc::new(tt)), err.map(|e| format!("{:?}", e)))
+}
+
+pub(crate) fn expand_proc_macro(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+) -> Result<tt::Subtree, mbe::ExpandError> {
+ let lazy_id = match id {
+ MacroCallId::LazyMacro(id) => id,
+ MacroCallId::EagerMacro(_) => unreachable!(),
+ };
+
+ let loc = db.lookup_intern_macro(lazy_id);
+ let macro_arg = match db.macro_arg(id) {
+ Some(it) => it,
+ None => {
+ return Err(
+ tt::ExpansionError::Unknown("No arguments for proc-macro".to_string()).into()
+ )
+ }
+ };
+
+ let expander = match loc.def.kind {
+ MacroDefKind::CustomDerive(expander) => expander,
+ _ => unreachable!(),
+ };
+
+ expander.expand(db, lazy_id, ¯o_arg.0)
+}
+
+pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
+ match file_id.0 {
+ HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
+ HirFileIdRepr::MacroFile(macro_file) => {
+ db.parse_macro(macro_file).map(|(it, _)| it.syntax_node())
+ }
+ }
+}
+
+pub(crate) fn parse_macro(
+ db: &dyn AstDatabase,
+ macro_file: MacroFile,
+) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
+ parse_macro_with_arg(db, macro_file, None)
+}
+
+pub fn parse_macro_with_arg(
+ db: &dyn AstDatabase,
+ macro_file: MacroFile,
+ arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
+) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
+ let _p = profile::span("parse_macro_query");
+
+ let macro_call_id = macro_file.macro_call_id;
+ let (tt, err) = if let Some(arg) = arg {
+ macro_expand_with_arg(db, macro_call_id, Some(arg))
+ } else {
+ db.macro_expand(macro_call_id)
+ };
+ if let Some(err) = &err {
+ // Note:
+        // Ideally, all `parse_macro` calls should succeed, so that the
+        // following warning is never logged.
+ match macro_call_id {
+ MacroCallId::LazyMacro(id) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro(id);
+ let node = loc.kind.node(db);
+
+ // collect parent information for warning log
+ let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
+ it.file_id.call_node(db)
+ })
+ .map(|n| format!("{:#}", n.value))
+ .collect::<Vec<_>>()
+ .join("\n");
+
+ log::warn!(
+ "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}",
+ err,
+ node.value,
+ parents
+ );
+ }
+ _ => {
+ log::warn!("fail on macro_parse: (reason: {})", err);
+ }
+ }
+ };
+ let tt = tt?;
+
+ let fragment_kind = to_fragment_kind(db, macro_call_id);
+
+ let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?;
+
+ if err.is_none() {
+ Some((parse, Arc::new(rev_token_map)))
+ } else {
+ // FIXME:
+        // In the future, we should propagate the actual error with recovery information
+        // instead of ignoring the error here.
+
+        // Safety check for a recursive identity macro.
+ let node = parse.syntax_node();
+ let file: HirFileId = macro_file.into();
+ let call_node = file.call_node(db)?;
+
+ if !diff(&node, &call_node.value).is_empty() {
+ Some((parse, Arc::new(rev_token_map)))
+ } else {
+ None
+ }
+ }
+}
+
+/// Given a `MacroCallId`, return what `FragmentKind` it belongs to.
+/// FIXME: Not completed
+fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind {
+ let lazy_id = match id {
+ MacroCallId::LazyMacro(id) => id,
+ MacroCallId::EagerMacro(id) => {
+ return db.lookup_intern_eager_expansion(id).fragment;
+ }
+ };
+ let syn = db.lookup_intern_macro(lazy_id).kind.node(db).value;
+
+ let parent = match syn.parent() {
+ Some(it) => it,
+ None => {
+ // FIXME:
+            // If the node is the root, the parent HirFile must be a macro file
+            // whose contents are non-items; return Expr for now.
+ return FragmentKind::Expr;
+ }
+ };
+
+ match parent.kind() {
+ MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items,
+ ITEM_LIST => FragmentKind::Items,
+ LET_STMT => {
+ // FIXME: Handle Pattern
+ FragmentKind::Expr
+ }
+ // FIXME: Expand to statements in appropriate positions; HIR lowering needs to handle that
+ EXPR_STMT | BLOCK_EXPR => FragmentKind::Expr,
+ ARG_LIST => FragmentKind::Expr,
+ TRY_EXPR => FragmentKind::Expr,
+ TUPLE_EXPR => FragmentKind::Expr,
+ PAREN_EXPR => FragmentKind::Expr,
+
+ FOR_EXPR => FragmentKind::Expr,
+ PATH_EXPR => FragmentKind::Expr,
+ CLOSURE_EXPR => FragmentKind::Expr,
+ CONDITION => FragmentKind::Expr,
+ BREAK_EXPR => FragmentKind::Expr,
+ RETURN_EXPR => FragmentKind::Expr,
+ MATCH_EXPR => FragmentKind::Expr,
+ MATCH_ARM => FragmentKind::Expr,
+ MATCH_GUARD => FragmentKind::Expr,
+ RECORD_EXPR_FIELD => FragmentKind::Expr,
+ CALL_EXPR => FragmentKind::Expr,
+ INDEX_EXPR => FragmentKind::Expr,
+ METHOD_CALL_EXPR => FragmentKind::Expr,
+ AWAIT_EXPR => FragmentKind::Expr,
+ CAST_EXPR => FragmentKind::Expr,
+ REF_EXPR => FragmentKind::Expr,
+ PREFIX_EXPR => FragmentKind::Expr,
+ RANGE_EXPR => FragmentKind::Expr,
+ BIN_EXPR => FragmentKind::Expr,
+ _ => {
+            // Unknown; just guess it is `Items`.
+ FragmentKind::Items
+ }
+ }
+}
--- /dev/null
+//! Semantic errors and warnings.
+//!
+//! The `Diagnostic` trait defines a trait object which can represent any
+//! diagnostic.
+//!
+//! `DiagnosticSink` struct is used as an emitter for diagnostic. When creating
+//! a `DiagnosticSink`, you supply a callback which can react to a `dyn
+//! Diagnostic` or to any concrete diagnostic (downcasting is used internally).
+//!
+//! Because diagnostics store file offsets, it's a bad idea to store them
+//! directly in salsa. For this reason, every hir subsystem defines its own
+//! strongly-typed closed set of diagnostics which use hir ids internally, are
+//! stored in salsa and do *not* implement the `Diagnostic` trait. Instead, a
+//! subsystem provides a separate, non-query-based API which can walk all stored
+//! values and transform them into instances of `Diagnostic`.
+
+use std::{any::Any, fmt};
+
+use syntax::SyntaxNodePtr;
+
+use crate::InFile;
+
+pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
+ fn message(&self) -> String;
+ /// Used in highlighting and related purposes
+ fn display_source(&self) -> InFile<SyntaxNodePtr>;
+ fn as_any(&self) -> &(dyn Any + Send + 'static);
+ fn is_experimental(&self) -> bool {
+ false
+ }
+}
+
+pub struct DiagnosticSink<'a> {
+ callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
+ filters: Vec<Box<dyn FnMut(&dyn Diagnostic) -> bool + 'a>>,
+ default_callback: Box<dyn FnMut(&dyn Diagnostic) + 'a>,
+}
+
+impl<'a> DiagnosticSink<'a> {
+ pub fn push(&mut self, d: impl Diagnostic) {
+ let d: &dyn Diagnostic = &d;
+ self._push(d);
+ }
+
+ fn _push(&mut self, d: &dyn Diagnostic) {
+ for filter in &mut self.filters {
+ if !filter(d) {
+ return;
+ }
+ }
+ for cb in &mut self.callbacks {
+ match cb(d) {
+ Ok(()) => return,
+ Err(()) => (),
+ }
+ }
+ (self.default_callback)(d)
+ }
+}
+
+pub struct DiagnosticSinkBuilder<'a> {
+ callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
+ filters: Vec<Box<dyn FnMut(&dyn Diagnostic) -> bool + 'a>>,
+}
+
+impl<'a> DiagnosticSinkBuilder<'a> {
+ pub fn new() -> Self {
+ Self { callbacks: Vec::new(), filters: Vec::new() }
+ }
+
+ pub fn filter<F: FnMut(&dyn Diagnostic) -> bool + 'a>(mut self, cb: F) -> Self {
+ self.filters.push(Box::new(cb));
+ self
+ }
+
+ pub fn on<D: Diagnostic, F: FnMut(&D) + 'a>(mut self, mut cb: F) -> Self {
+ let cb = move |diag: &dyn Diagnostic| match diag.as_any().downcast_ref::<D>() {
+ Some(d) => {
+ cb(d);
+ Ok(())
+ }
+ None => Err(()),
+ };
+ self.callbacks.push(Box::new(cb));
+ self
+ }
+
+ pub fn build<F: FnMut(&dyn Diagnostic) + 'a>(self, default_callback: F) -> DiagnosticSink<'a> {
+ DiagnosticSink {
+ callbacks: self.callbacks,
+ filters: self.filters,
+ default_callback: Box::new(default_callback),
+ }
+ }
+}
--- /dev/null
+//! Eager expansion related utils
+//!
+//! Here is a dump of a discussion from Vadim Petrochenkov about Eager Expansion and
+//! Its name resolution :
+//!
+//! > Eagerly expanded macros (and also macros eagerly expanded by eagerly expanded macros,
+//! > which actually happens in practice too!) are resolved at the location of the "root" macro
+//! > that performs the eager expansion on its arguments.
+//! > If some name cannot be resolved at the eager expansion time it's considered unresolved,
+//! > even if becomes available later (e.g. from a glob import or other macro).
+//!
+//! > Eagerly expanded macros don't add anything to the module structure of the crate and
+//! > don't build any speculative module structures, i.e. they are expanded in a "flat"
+//! > way even if tokens in them look like modules.
+//!
+//! > In other words, it kinda works for simple cases for which it was originally intended,
+//! > and we need to live with it because it's available on stable and widely relied upon.
+//!
+//!
+//! See the full discussion : https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros
+
+use crate::{
+ ast::{self, AstNode},
+ db::AstDatabase,
+ EagerCallLoc, EagerMacroId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
+};
+
+use base_db::CrateId;
+use parser::FragmentKind;
+use std::sync::Arc;
+use syntax::{algo::SyntaxRewriter, SyntaxNode};
+
+pub fn expand_eager_macro(
+ db: &dyn AstDatabase,
+ krate: CrateId,
+ macro_call: InFile<ast::MacroCall>,
+ def: MacroDefId,
+ resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
+) -> Option<EagerMacroId> {
+ let args = macro_call.value.token_tree()?;
+ let parsed_args = mbe::ast_to_token_tree(&args)?.0;
+
+ // Note:
+    // When `lazy_expand` is called, its *parent* file must already exist.
+    // For that purpose, we store an eager macro id for the expanded argument
+    // subtree here.
+ let arg_id = db.intern_eager_expansion({
+ EagerCallLoc {
+ def,
+ fragment: FragmentKind::Expr,
+ subtree: Arc::new(parsed_args.clone()),
+ krate,
+ file_id: macro_call.file_id,
+ }
+ });
+ let arg_file_id: MacroCallId = arg_id.into();
+
+ let parsed_args = mbe::token_tree_to_syntax_node(&parsed_args, FragmentKind::Expr).ok()?.0;
+ let result = eager_macro_recur(
+ db,
+ InFile::new(arg_file_id.as_file(), parsed_args.syntax_node()),
+ krate,
+ resolver,
+ )?;
+ let subtree = to_subtree(&result)?;
+
+ if let MacroDefKind::BuiltInEager(eager) = def.kind {
+ let (subtree, fragment) = eager.expand(db, arg_id, &subtree).ok()?;
+ let eager = EagerCallLoc {
+ def,
+ fragment,
+ subtree: Arc::new(subtree),
+ krate,
+ file_id: macro_call.file_id,
+ };
+
+ Some(db.intern_eager_expansion(eager))
+ } else {
+ None
+ }
+}
+
+fn to_subtree(node: &SyntaxNode) -> Option<tt::Subtree> {
+ let mut subtree = mbe::syntax_node_to_token_tree(node)?.0;
+ subtree.delimiter = None;
+ Some(subtree)
+}
+
+fn lazy_expand(
+ db: &dyn AstDatabase,
+ def: &MacroDefId,
+ macro_call: InFile<ast::MacroCall>,
+ krate: CrateId,
+) -> Option<InFile<SyntaxNode>> {
+ let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value);
+
+ let id: MacroCallId =
+ def.as_lazy_macro(db, krate, MacroCallKind::FnLike(macro_call.with_value(ast_id))).into();
+
+ db.parse_or_expand(id.as_file()).map(|node| InFile::new(id.as_file(), node))
+}
+
+fn eager_macro_recur(
+ db: &dyn AstDatabase,
+ curr: InFile<SyntaxNode>,
+ krate: CrateId,
+ macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
+) -> Option<SyntaxNode> {
+ let original = curr.value.clone();
+
+ let children = curr.value.descendants().filter_map(ast::MacroCall::cast);
+ let mut rewriter = SyntaxRewriter::default();
+
+ // Collect replacement
+ for child in children {
+ let def: MacroDefId = macro_resolver(child.path()?)?;
+ let insert = match def.kind {
+ MacroDefKind::BuiltInEager(_) => {
+ let id: MacroCallId = expand_eager_macro(
+ db,
+ krate,
+ curr.with_value(child.clone()),
+ def,
+ macro_resolver,
+ )?
+ .into();
+ db.parse_or_expand(id.as_file())?
+ }
+ MacroDefKind::Declarative
+ | MacroDefKind::BuiltIn(_)
+ | MacroDefKind::BuiltInDerive(_)
+ | MacroDefKind::CustomDerive(_) => {
+ let expanded = lazy_expand(db, &def, curr.with_value(child.clone()), krate)?;
+ // replace macro inside
+ eager_macro_recur(db, expanded, krate, macro_resolver)?
+ }
+ };
+
+ rewriter.replace(child.syntax(), &insert);
+ }
+
+ let res = rewriter.rewrite(&original);
+ Some(res)
+}
--- /dev/null
+//! This module handles hygiene information.
+//!
+//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
+//! this moment, this is horribly incomplete and handles only `$crate`.
+use base_db::CrateId;
+use either::Either;
+use syntax::ast;
+
+use crate::{
+ db::AstDatabase,
+ name::{AsName, Name},
+ HirFileId, HirFileIdRepr, MacroCallId, MacroDefKind,
+};
+
+#[derive(Clone, Debug)]
+pub struct Hygiene {
+ // This is what `$crate` expands to
+ def_crate: Option<CrateId>,
+
+ // Indicate this is a local inner macro
+ local_inner: bool,
+}
+
+impl Hygiene {
+ pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene {
+ let (def_crate, local_inner) = match file_id.0 {
+ HirFileIdRepr::FileId(_) => (None, false),
+ HirFileIdRepr::MacroFile(macro_file) => match macro_file.macro_call_id {
+ MacroCallId::LazyMacro(id) => {
+ let loc = db.lookup_intern_macro(id);
+ match loc.def.kind {
+ MacroDefKind::Declarative => (loc.def.krate, loc.def.local_inner),
+ MacroDefKind::BuiltIn(_) => (None, false),
+ MacroDefKind::BuiltInDerive(_) => (None, false),
+ MacroDefKind::BuiltInEager(_) => (None, false),
+ MacroDefKind::CustomDerive(_) => (None, false),
+ }
+ }
+ MacroCallId::EagerMacro(_id) => (None, false),
+ },
+ };
+ Hygiene { def_crate, local_inner }
+ }
+
+ pub fn new_unhygienic() -> Hygiene {
+ Hygiene { def_crate: None, local_inner: false }
+ }
+
+ // FIXME: this should just return name
+ pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either<Name, CrateId> {
+ if let Some(def_crate) = self.def_crate {
+ if name_ref.text() == "$crate" {
+ return Either::Right(def_crate);
+ }
+ }
+ Either::Left(name_ref.as_name())
+ }
+
+ pub fn local_inner_macros(&self) -> Option<CrateId> {
+ if self.local_inner {
+ self.def_crate
+ } else {
+ None
+ }
+ }
+}
--- /dev/null
+//! `hir_expand` deals with macro expansion.
+//!
+//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax
+//! tree originates not from the text of some `FileId`, but from some macro
+//! expansion.
+
+pub mod db;
+pub mod ast_id_map;
+pub mod name;
+pub mod hygiene;
+pub mod diagnostics;
+pub mod builtin_derive;
+pub mod builtin_macro;
+pub mod proc_macro;
+pub mod quote;
+pub mod eager;
+
+use std::hash::Hash;
+use std::sync::Arc;
+
+use base_db::{impl_intern_key, salsa, CrateId, FileId};
+use syntax::{
+ algo,
+ ast::{self, AstNode},
+ SyntaxNode, SyntaxToken, TextSize,
+};
+
+use crate::ast_id_map::FileAstId;
+use crate::builtin_derive::BuiltinDeriveExpander;
+use crate::builtin_macro::{BuiltinFnLikeExpander, EagerExpander};
+use crate::proc_macro::ProcMacroExpander;
+
+#[cfg(test)]
+mod test_db;
+
+/// Input to the analyzer is a set of files, where each file is identified by
+/// `FileId` and contains source code. However, another source of source code in
+/// Rust are macros: each macro can be thought of as producing a "temporary
+/// file". To assign an id to such a file, we use the id of the macro call that
+/// produced the file. So, a `HirFileId` is either a `FileId` (source code
+/// written by user), or a `MacroCallId` (source code produced by macro).
+///
+/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
+/// containing the call plus the offset of the macro call in the file. Note that
+/// this is a recursive definition! However, the size_of of `HirFileId` is
+/// finite (because everything bottoms out at the real `FileId`) and small
+/// (`MacroCallId` uses the location interning. You can check details here:
+/// https://en.wikipedia.org/wiki/String_interning).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct HirFileId(HirFileIdRepr);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+enum HirFileIdRepr {
+ FileId(FileId),
+ MacroFile(MacroFile),
+}
+
+impl From<FileId> for HirFileId {
+ fn from(id: FileId) -> Self {
+ HirFileId(HirFileIdRepr::FileId(id))
+ }
+}
+
+impl From<MacroFile> for HirFileId {
+ fn from(id: MacroFile) -> Self {
+ HirFileId(HirFileIdRepr::MacroFile(id))
+ }
+}
+
+impl HirFileId {
+    /// For macro-expansion files, returns the original source file the
+    /// expansion originated from.
+ pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId {
+ match self.0 {
+ HirFileIdRepr::FileId(file_id) => file_id,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let file_id = match macro_file.macro_call_id {
+ MacroCallId::LazyMacro(id) => {
+ let loc = db.lookup_intern_macro(id);
+ loc.kind.file_id()
+ }
+ MacroCallId::EagerMacro(id) => {
+ let loc = db.lookup_intern_eager_expansion(id);
+ loc.file_id
+ }
+ };
+ file_id.original_file(db)
+ }
+ }
+ }
+
+ pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 {
+ let mut level = 0;
+ let mut curr = self;
+ while let HirFileIdRepr::MacroFile(macro_file) = curr.0 {
+ level += 1;
+ curr = match macro_file.macro_call_id {
+ MacroCallId::LazyMacro(id) => {
+ let loc = db.lookup_intern_macro(id);
+ loc.kind.file_id()
+ }
+ MacroCallId::EagerMacro(id) => {
+ let loc = db.lookup_intern_eager_expansion(id);
+ loc.file_id
+ }
+ };
+ }
+ level
+ }
+
+ /// If this is a macro call, returns the syntax node of the call.
+ pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let lazy_id = match macro_file.macro_call_id {
+ MacroCallId::LazyMacro(id) => id,
+ MacroCallId::EagerMacro(_id) => {
+ // FIXME: handle call node for eager macro
+ return None;
+ }
+ };
+ let loc = db.lookup_intern_macro(lazy_id);
+ Some(loc.kind.node(db))
+ }
+ }
+ }
+
+ /// Return expansion information if it is a macro-expansion file
+ pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option<ExpansionInfo> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let lazy_id = match macro_file.macro_call_id {
+ MacroCallId::LazyMacro(id) => id,
+ MacroCallId::EagerMacro(_id) => {
+ // FIXME: handle expansion_info for eager macro
+ return None;
+ }
+ };
+ let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id);
+
+ let arg_tt = loc.kind.arg(db)?;
+ let def_tt = loc.def.ast_id?.to_node(db).token_tree()?;
+
+ let macro_def = db.macro_def(loc.def)?;
+ let (parse, exp_map) = db.parse_macro(macro_file)?;
+ let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+
+ Some(ExpansionInfo {
+ expanded: InFile::new(self, parse.syntax_node()),
+ arg: InFile::new(loc.kind.file_id(), arg_tt),
+ def: InFile::new(loc.def.ast_id?.file_id, def_tt),
+ macro_arg,
+ macro_def,
+ exp_map,
+ })
+ }
+ }
+ }
+
+ /// Indicate it is macro file generated for builtin derive
+ pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option<InFile<ast::Item>> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let lazy_id = match macro_file.macro_call_id {
+ MacroCallId::LazyMacro(id) => id,
+ MacroCallId::EagerMacro(_id) => {
+ return None;
+ }
+ };
+ let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id);
+ let item = match loc.def.kind {
+ MacroDefKind::BuiltInDerive(_) => loc.kind.node(db),
+ _ => return None,
+ };
+ Some(item.with_value(ast::Item::cast(item.value.clone())?))
+ }
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroFile {
+ macro_call_id: MacroCallId,
+}
+
+/// `MacroCallId` identifies a particular macro invocation, like
+/// `println!("Hello, {}", world)`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroCallId {
+ LazyMacro(LazyMacroId),
+ EagerMacro(EagerMacroId),
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct LazyMacroId(salsa::InternId);
+impl_intern_key!(LazyMacroId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct EagerMacroId(salsa::InternId);
+impl_intern_key!(EagerMacroId);
+
+impl From<LazyMacroId> for MacroCallId {
+ fn from(it: LazyMacroId) -> Self {
+ MacroCallId::LazyMacro(it)
+ }
+}
+impl From<EagerMacroId> for MacroCallId {
+ fn from(it: EagerMacroId) -> Self {
+ MacroCallId::EagerMacro(it)
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroDefId {
+ // FIXME: krate and ast_id are currently optional because we don't have a
+ // definition location for built-in derives. There is one, though: the
+ // standard library defines them. The problem is that it uses the new
+ // `macro` syntax for this, which we don't support yet. As soon as we do
+ // (which will probably require touching this code), we can instead use
+ // that (and also remove the hacks for resolving built-in derives).
+ pub krate: Option<CrateId>,
+ pub ast_id: Option<AstId<ast::MacroCall>>,
+ pub kind: MacroDefKind,
+
+ pub local_inner: bool,
+}
+
+impl MacroDefId {
+ pub fn as_lazy_macro(
+ self,
+ db: &dyn db::AstDatabase,
+ krate: CrateId,
+ kind: MacroCallKind,
+ ) -> LazyMacroId {
+ db.intern_macro(MacroCallLoc { def: self, krate, kind })
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroDefKind {
+ Declarative,
+ BuiltIn(BuiltinFnLikeExpander),
+ // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander
+ BuiltInDerive(BuiltinDeriveExpander),
+ BuiltInEager(EagerExpander),
+ CustomDerive(ProcMacroExpander),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroCallLoc {
+ pub(crate) def: MacroDefId,
+ pub(crate) krate: CrateId,
+ pub(crate) kind: MacroCallKind,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum MacroCallKind {
+ FnLike(AstId<ast::MacroCall>),
+ Attr(AstId<ast::Item>, String),
+}
+
+impl MacroCallKind {
+ fn file_id(&self) -> HirFileId {
+ match self {
+ MacroCallKind::FnLike(ast_id) => ast_id.file_id,
+ MacroCallKind::Attr(ast_id, _) => ast_id.file_id,
+ }
+ }
+
+ fn node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> {
+ match self {
+ MacroCallKind::FnLike(ast_id) => ast_id.with_value(ast_id.to_node(db).syntax().clone()),
+ MacroCallKind::Attr(ast_id, _) => {
+ ast_id.with_value(ast_id.to_node(db).syntax().clone())
+ }
+ }
+ }
+
+ fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> {
+ match self {
+ MacroCallKind::FnLike(ast_id) => {
+ Some(ast_id.to_node(db).token_tree()?.syntax().clone())
+ }
+ MacroCallKind::Attr(ast_id, _) => Some(ast_id.to_node(db).syntax().clone()),
+ }
+ }
+}
+
+impl MacroCallId {
+ pub fn as_file(self) -> HirFileId {
+ MacroFile { macro_call_id: self }.into()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct EagerCallLoc {
+ pub(crate) def: MacroDefId,
+ pub(crate) fragment: FragmentKind,
+ pub(crate) subtree: Arc<tt::Subtree>,
+ pub(crate) krate: CrateId,
+ pub(crate) file_id: HirFileId,
+}
+
+/// ExpansionInfo mainly describes how to map text range between src and expanded macro
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ExpansionInfo {
+ expanded: InFile<SyntaxNode>,
+ arg: InFile<SyntaxNode>,
+ def: InFile<ast::TokenTree>,
+
+ macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
+ macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+ exp_map: Arc<mbe::TokenMap>,
+}
+
+pub use mbe::Origin;
+use parser::FragmentKind;
+
+impl ExpansionInfo {
+ pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
+ Some(self.arg.with_value(self.arg.value.parent()?))
+ }
+
+ pub fn map_token_down(&self, token: InFile<&SyntaxToken>) -> Option<InFile<SyntaxToken>> {
+ assert_eq!(token.file_id, self.arg.file_id);
+ let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
+ let token_id = self.macro_arg.1.token_by_range(range)?;
+ let token_id = self.macro_def.0.map_id_down(token_id);
+
+ let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
+
+ let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;
+
+ Some(self.expanded.with_value(token))
+ }
+
+ pub fn map_token_up(
+ &self,
+ token: InFile<&SyntaxToken>,
+ ) -> Option<(InFile<SyntaxToken>, Origin)> {
+ let token_id = self.exp_map.token_by_range(token.value.text_range())?;
+
+ let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
+ let (token_map, tt) = match origin {
+ mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
+ mbe::Origin::Def => {
+ (&self.macro_def.1, self.def.as_ref().map(|tt| tt.syntax().clone()))
+ }
+ };
+
+ let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
+ let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start())
+ .into_token()?;
+ Some((tt.with_value(token), origin))
+ }
+}
+
+/// `AstId` points to an AST node in any file.
+///
+/// It is stable across reparses, and can be used as salsa key/value.
+// FIXME: isn't this just a `Source<FileAstId<N>>` ?
+pub type AstId<N> = InFile<FileAstId<N>>;
+
+impl<N: AstNode> AstId<N> {
+ pub fn to_node(&self, db: &dyn db::AstDatabase) -> N {
+ let root = db.parse_or_expand(self.file_id).unwrap();
+ db.ast_id_map(self.file_id).get(self.value).to_node(&root)
+ }
+}
+
+/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
+///
+/// Typical usages are:
+///
+/// * `InFile<SyntaxNode>` -- syntax node in a file
+/// * `InFile<ast::FnDef>` -- ast node in a file
+/// * `InFile<TextSize>` -- offset in a file
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InFile<T> {
+ pub file_id: HirFileId,
+ pub value: T,
+}
+
+impl<T> InFile<T> {
+ pub fn new(file_id: HirFileId, value: T) -> InFile<T> {
+ InFile { file_id, value }
+ }
+
+ // Similarly, naming here is stupid...
+ pub fn with_value<U>(&self, value: U) -> InFile<U> {
+ InFile::new(self.file_id, value)
+ }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFile<U> {
+ InFile::new(self.file_id, f(self.value))
+ }
+ pub fn as_ref(&self) -> InFile<&T> {
+ self.with_value(&self.value)
+ }
+ pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode {
+ db.parse_or_expand(self.file_id).expect("source created from invalid file")
+ }
+}
+
+impl<T: Clone> InFile<&T> {
+ pub fn cloned(&self) -> InFile<T> {
+ self.with_value(self.value.clone())
+ }
+}
+
+impl<T> InFile<Option<T>> {
+ pub fn transpose(self) -> Option<InFile<T>> {
+ let value = self.value?;
+ Some(InFile::new(self.file_id, value))
+ }
+}
+
+impl InFile<SyntaxNode> {
+ pub fn ancestors_with_macros(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ std::iter::successors(Some(self), move |node| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => {
+ let parent_node = node.file_id.call_node(db)?;
+ Some(parent_node)
+ }
+ })
+ }
+}
+
+impl InFile<SyntaxToken> {
+ pub fn ancestors_with_macros(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ self.map(|it| it.parent()).ancestors_with_macros(db)
+ }
+}
+
+impl<N: AstNode> InFile<N> {
+ pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
+ self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
+ }
+
+ pub fn syntax(&self) -> InFile<&SyntaxNode> {
+ self.with_value(self.value.syntax())
+ }
+}
--- /dev/null
+//! FIXME: write short doc here
+
+use std::fmt;
+
+use syntax::{ast, SmolStr};
+
+/// `Name` is a wrapper around string, which is used in hir for both references
+/// and declarations. In theory, names should also carry hygiene info, but we are
+/// not there yet!
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct Name(Repr);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+enum Repr {
+ Text(SmolStr),
+ TupleField(usize),
+}
+
+impl fmt::Display for Name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match &self.0 {
+ Repr::Text(text) => fmt::Display::fmt(&text, f),
+ Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
+ }
+ }
+}
+
+impl Name {
+ /// Note: this is private to make it hard to create a name from an arbitrary string.
+ /// Hopefully, this should allow us to integrate hygiene cleaner in the
+ /// future, and to switch to interned representation of names.
+ const fn new_text(text: SmolStr) -> Name {
+ Name(Repr::Text(text))
+ }
+
+ pub fn new_tuple_field(idx: usize) -> Name {
+ Name(Repr::TupleField(idx))
+ }
+
+ pub fn new_lifetime(lt: &syntax::SyntaxToken) -> Name {
+ assert!(lt.kind() == syntax::SyntaxKind::LIFETIME);
+ Name(Repr::Text(lt.text().clone()))
+ }
+
+ /// Shortcut to create inline plain text name
+ const fn new_inline_ascii(text: &[u8]) -> Name {
+ Name::new_text(SmolStr::new_inline_from_ascii(text.len(), text))
+ }
+
+ /// Resolve a name from the text of a token, stripping any `r#` raw-identifier prefix.
+ fn resolve(raw_text: &SmolStr) -> Name {
+ let raw_start = "r#";
+ if raw_text.as_str().starts_with(raw_start) {
+ Name::new_text(SmolStr::new(&raw_text[raw_start.len()..]))
+ } else {
+ Name::new_text(raw_text.clone())
+ }
+ }
+
+ pub fn missing() -> Name {
+ Name::new_text("[missing name]".into())
+ }
+
+ pub fn as_tuple_index(&self) -> Option<usize> {
+ match self.0 {
+ Repr::TupleField(idx) => Some(idx),
+ _ => None,
+ }
+ }
+}
+
+pub trait AsName {
+ fn as_name(&self) -> Name;
+}
+
+impl AsName for ast::NameRef {
+ fn as_name(&self) -> Name {
+ match self.as_tuple_field() {
+ Some(idx) => Name::new_tuple_field(idx),
+ None => Name::resolve(self.text()),
+ }
+ }
+}
+
+impl AsName for ast::Name {
+ fn as_name(&self) -> Name {
+ Name::resolve(self.text())
+ }
+}
+
+impl AsName for ast::NameOrNameRef {
+ fn as_name(&self) -> Name {
+ match self {
+ ast::NameOrNameRef::Name(it) => it.as_name(),
+ ast::NameOrNameRef::NameRef(it) => it.as_name(),
+ }
+ }
+}
+
+impl AsName for tt::Ident {
+ fn as_name(&self) -> Name {
+ Name::resolve(&self.text)
+ }
+}
+
+impl AsName for ast::FieldKind {
+ fn as_name(&self) -> Name {
+ match self {
+ ast::FieldKind::Name(nr) => nr.as_name(),
+ ast::FieldKind::Index(idx) => {
+ let idx = idx.text().parse::<usize>().unwrap_or(0);
+ Name::new_tuple_field(idx)
+ }
+ }
+ }
+}
+
+impl AsName for base_db::Dependency {
+ fn as_name(&self) -> Name {
+ Name::new_text(SmolStr::new(&*self.name))
+ }
+}
+
+pub mod known {
+ macro_rules! known_names {
+ ($($ident:ident),* $(,)?) => {
+ $(
+ #[allow(bad_style)]
+ pub const $ident: super::Name =
+ super::Name::new_inline_ascii(stringify!($ident).as_bytes());
+ )*
+ };
+ }
+
+ known_names!(
+ // Primitives
+ isize,
+ i8,
+ i16,
+ i32,
+ i64,
+ i128,
+ usize,
+ u8,
+ u16,
+ u32,
+ u64,
+ u128,
+ f32,
+ f64,
+ bool,
+ char,
+ str,
+ // Special names
+ macro_rules,
+ doc,
+ // Components of known path (value or mod name)
+ std,
+ core,
+ alloc,
+ iter,
+ ops,
+ future,
+ result,
+ boxed,
+ // Components of known path (type name)
+ IntoIterator,
+ Item,
+ Try,
+ Ok,
+ Future,
+ Result,
+ Output,
+ Target,
+ Box,
+ RangeFrom,
+ RangeFull,
+ RangeInclusive,
+ RangeToInclusive,
+ RangeTo,
+ Range,
+ Neg,
+ Not,
+ Index,
+ // Builtin macros
+ file,
+ column,
+ compile_error,
+ line,
+ assert,
+ stringify,
+ concat,
+ include,
+ include_bytes,
+ include_str,
+ format_args,
+ format_args_nl,
+ env,
+ option_env,
+ // Builtin derives
+ Copy,
+ Clone,
+ Default,
+ Debug,
+ Hash,
+ Ord,
+ PartialOrd,
+ Eq,
+ PartialEq,
+ );
+
+ // self/Self cannot be used as an identifier
+ pub const SELF_PARAM: super::Name = super::Name::new_inline_ascii(b"self");
+ pub const SELF_TYPE: super::Name = super::Name::new_inline_ascii(b"Self");
+
+ #[macro_export]
+ macro_rules! name {
+ (self) => {
+ $crate::name::known::SELF_PARAM
+ };
+ (Self) => {
+ $crate::name::known::SELF_TYPE
+ };
+ ($ident:ident) => {
+ $crate::name::known::$ident
+ };
+ }
+}
+
+pub use crate::name;
--- /dev/null
+//! Proc Macro Expander stub
+
+use crate::{db::AstDatabase, LazyMacroId};
+use base_db::{CrateId, ProcMacroId};
+use tt::buffer::{Cursor, TokenBuffer};
+
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
+pub struct ProcMacroExpander {
+ krate: CrateId,
+ proc_macro_id: ProcMacroId,
+}
+
+macro_rules! err {
+ ($fmt:literal, $($tt:tt),*) => {
+ mbe::ExpandError::ProcMacroError(tt::ExpansionError::Unknown(format!($fmt, $($tt),*)))
+ };
+ ($fmt:literal) => {
+ mbe::ExpandError::ProcMacroError(tt::ExpansionError::Unknown($fmt.to_string()))
+ }
+}
+
+impl ProcMacroExpander {
+ pub fn new(krate: CrateId, proc_macro_id: ProcMacroId) -> ProcMacroExpander {
+ ProcMacroExpander { krate, proc_macro_id }
+ }
+
+ pub fn expand(
+ self,
+ db: &dyn AstDatabase,
+ _id: LazyMacroId,
+ tt: &tt::Subtree,
+ ) -> Result<tt::Subtree, mbe::ExpandError> {
+ let krate_graph = db.crate_graph();
+ let proc_macro = krate_graph[self.krate]
+ .proc_macro
+ .get(self.proc_macro_id.0 as usize)
+ .clone()
+ .ok_or_else(|| err!("No derive macro found."))?;
+
+ let tt = remove_derive_attrs(tt)
+ .ok_or_else(|| err!("Fail to remove derive for custom derive"))?;
+
+ proc_macro.expander.expand(&tt, None).map_err(mbe::ExpandError::from)
+ }
+}
+
+fn eat_punct(cursor: &mut Cursor, c: char) -> bool {
+ if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = cursor.token_tree() {
+ if punct.char == c {
+ *cursor = cursor.bump();
+ return true;
+ }
+ }
+ false
+}
+
+fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool {
+ if let Some(tt::TokenTree::Subtree(subtree)) = cursor.token_tree() {
+ if Some(kind) == subtree.delimiter_kind() {
+ *cursor = cursor.bump_subtree();
+ return true;
+ }
+ }
+ false
+}
+
+fn eat_ident(cursor: &mut Cursor, t: &str) -> bool {
+ if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = cursor.token_tree() {
+ if t == ident.text.as_str() {
+ *cursor = cursor.bump();
+ return true;
+ }
+ }
+ false
+}
+
+fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
+ let buffer = TokenBuffer::new(&tt.token_trees);
+ let mut p = buffer.begin();
+ let mut result = tt::Subtree::default();
+
+ while !p.eof() {
+ let curr = p;
+
+ if eat_punct(&mut p, '#') {
+ eat_punct(&mut p, '!');
+ let parent = p;
+ if eat_subtree(&mut p, tt::DelimiterKind::Bracket) {
+ if eat_ident(&mut p, "derive") {
+ p = parent.bump();
+ continue;
+ }
+ }
+ }
+
+ result.token_trees.push(curr.token_tree()?.clone());
+ p = curr.bump();
+ }
+
+ Some(result)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use test_utils::assert_eq_text;
+
+ #[test]
+ fn test_remove_derive_attrs() {
+ let tt = mbe::parse_to_token_tree(
+ r#"
+ #[allow(unused)]
+ #[derive(Copy)]
+ #[derive(Hello)]
+ struct A {
+ bar: u32
+ }
+"#,
+ )
+ .unwrap()
+ .0;
+ let result = format!("{:#?}", remove_derive_attrs(&tt).unwrap());
+
+ assert_eq_text!(
+ &result,
+ r#"
+SUBTREE $
+ PUNCH # [alone] 0
+ SUBTREE [] 1
+ IDENT allow 2
+ SUBTREE () 3
+ IDENT unused 4
+ IDENT struct 15
+ IDENT A 16
+ SUBTREE {} 17
+ IDENT bar 18
+ PUNCH : [alone] 19
+ IDENT u32 20
+"#
+ .trim()
+ );
+ }
+}
--- /dev/null
+//! A simplified version of quote-crate like quasi quote macro
+
+// A helper macro backing the `quote!` macro below
+// FIXME:
+// 1. Not all puncts are handled
+// 2. #()* pattern repetition not supported now
+// * But we can do it manually, see `test_quote_derive_copy_hack`
+#[doc(hidden)]
+#[macro_export]
+macro_rules! __quote {
+ () => {
+ Vec::<tt::TokenTree>::new()
+ };
+
+ ( @SUBTREE $delim:ident $($tt:tt)* ) => {
+ {
+ let children = $crate::__quote!($($tt)*);
+ tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::$delim,
+ id: tt::TokenId::unspecified(),
+ }),
+ token_trees: $crate::quote::IntoTt::to_tokens(children),
+ }
+ }
+ };
+
+ ( @PUNCT $first:literal ) => {
+ {
+ vec![
+ tt::Leaf::Punct(tt::Punct {
+ char: $first,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }).into()
+ ]
+ }
+ };
+
+ ( @PUNCT $first:literal, $sec:literal ) => {
+ {
+ vec![
+ tt::Leaf::Punct(tt::Punct {
+ char: $first,
+ spacing: tt::Spacing::Joint,
+ id: tt::TokenId::unspecified(),
+ }).into(),
+ tt::Leaf::Punct(tt::Punct {
+ char: $sec,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }).into()
+ ]
+ }
+ };
+
+ // hash variable
+ ( # $first:ident $($tail:tt)* ) => {
+ {
+ let token = $crate::quote::ToTokenTree::to_token($first);
+ let mut tokens = vec![token.into()];
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ tokens.append(&mut tail_tokens);
+ tokens
+ }
+ };
+
+ ( ## $first:ident $($tail:tt)* ) => {
+ {
+ let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<tt::TokenTree>>();
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ tokens.append(&mut tail_tokens);
+ tokens
+ }
+ };
+
+ // Brace
+ ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
+ // Bracket
+ ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
+ // Parenthesis
+ ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
+
+ // Literal
+ ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
+ // Ident
+ ( $tt:ident ) => {
+ vec![ {
+ tt::Leaf::Ident(tt::Ident {
+ text: stringify!($tt).into(),
+ id: tt::TokenId::unspecified(),
+ }).into()
+ }]
+ };
+
+ // Puncts
+ // FIXME: Not all puncts are handled
+ ( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
+ ( & ) => {$crate::__quote!(@PUNCT '&')};
+ ( , ) => {$crate::__quote!(@PUNCT ',')};
+ ( : ) => {$crate::__quote!(@PUNCT ':')};
+ ( ; ) => {$crate::__quote!(@PUNCT ';')};
+ ( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
+ ( . ) => {$crate::__quote!(@PUNCT '.')};
+ ( < ) => {$crate::__quote!(@PUNCT '<')};
+ ( > ) => {$crate::__quote!(@PUNCT '>')};
+
+ ( $first:tt $($tail:tt)+ ) => {
+ {
+ let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+
+ tokens.append(&mut tail_tokens);
+ tokens
+ }
+ };
+}
+
+/// FIXME:
+/// This should probably be implemented as a proc-macro.
+#[macro_export]
+macro_rules! quote {
+ ( $($tt:tt)* ) => {
+ $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
+ }
+}
+
+pub(crate) trait IntoTt {
+ fn to_subtree(self) -> tt::Subtree;
+ fn to_tokens(self) -> Vec<tt::TokenTree>;
+}
+
+impl IntoTt for Vec<tt::TokenTree> {
+ fn to_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self }
+ }
+
+ fn to_tokens(self) -> Vec<tt::TokenTree> {
+ self
+ }
+}
+
+impl IntoTt for tt::Subtree {
+ fn to_subtree(self) -> tt::Subtree {
+ self
+ }
+
+ fn to_tokens(self) -> Vec<tt::TokenTree> {
+ vec![tt::TokenTree::Subtree(self)]
+ }
+}
+
+pub(crate) trait ToTokenTree {
+ fn to_token(self) -> tt::TokenTree;
+}
+
+impl ToTokenTree for tt::TokenTree {
+ fn to_token(self) -> tt::TokenTree {
+ self
+ }
+}
+
+impl ToTokenTree for tt::Subtree {
+ fn to_token(self) -> tt::TokenTree {
+ self.into()
+ }
+}
+
+macro_rules! impl_to_to_tokentrees {
+ ($($ty:ty => $this:ident $im:block);*) => {
+ $(
+ impl ToTokenTree for $ty {
+ fn to_token($this) -> tt::TokenTree {
+ let leaf: tt::Leaf = $im.into();
+ leaf.into()
+ }
+ }
+
+ impl ToTokenTree for &$ty {
+ fn to_token($this) -> tt::TokenTree {
+ let leaf: tt::Leaf = $im.clone().into();
+ leaf.into()
+ }
+ }
+ )*
+ }
+}
+
+impl_to_to_tokentrees! {
+ u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+ usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
+ i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
+ tt::Leaf => self { self };
+ tt::Literal => self { self };
+ tt::Ident => self { self };
+ tt::Punct => self { self };
+ &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}};
+ String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn test_quote_delimiters() {
+ assert_eq!(quote!({}).to_string(), "{}");
+ assert_eq!(quote!(()).to_string(), "()");
+ assert_eq!(quote!([]).to_string(), "[]");
+ }
+
+ #[test]
+ fn test_quote_idents() {
+ assert_eq!(quote!(32).to_string(), "32");
+ assert_eq!(quote!(struct).to_string(), "struct");
+ }
+
+ #[test]
+ fn test_quote_hash_simple_literal() {
+ let a = 20;
+ assert_eq!(quote!(#a).to_string(), "20");
+ let s: String = "hello".into();
+ assert_eq!(quote!(#s).to_string(), "\"hello\"");
+ }
+
+ fn mk_ident(name: &str) -> tt::Ident {
+ tt::Ident { text: name.into(), id: tt::TokenId::unspecified() }
+ }
+
+ #[test]
+ fn test_quote_hash_token_tree() {
+ let a = mk_ident("hello");
+
+ let quoted = quote!(#a);
+ assert_eq!(quoted.to_string(), "hello");
+ let t = format!("{:?}", quoted);
+ assert_eq!(t, "SUBTREE $\n IDENT hello 4294967295");
+ }
+
+ #[test]
+ fn test_quote_simple_derive_copy() {
+ let name = mk_ident("Foo");
+
+ let quoted = quote! {
+ impl Clone for #name {
+ fn clone(&self) -> Self {
+ Self {}
+ }
+ }
+ };
+
+ assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {}}}");
+ }
+
+ #[test]
+ fn test_quote_derive_copy_hack() {
+ // Assume the given struct is:
+ // struct Foo {
+ // name: String,
+ // id: u32,
+ // }
+ let struct_name = mk_ident("Foo");
+ let fields = [mk_ident("name"), mk_ident("id")];
+ let fields = fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees).flatten();
+
+ let list = tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::Brace,
+ id: tt::TokenId::unspecified(),
+ }),
+ token_trees: fields.collect(),
+ };
+
+ let quoted = quote! {
+ impl Clone for #struct_name {
+ fn clone(&self) -> Self {
+ Self #list
+ }
+ }
+ };
+
+ assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {name : self . name . clone () , id : self . id . clone () ,}}}");
+ }
+}
--- /dev/null
+//! Database used for testing `hir_expand`.
+
+use std::{
+ fmt, panic,
+ sync::{Arc, Mutex},
+};
+
+use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate};
+use rustc_hash::FxHashSet;
+
+#[salsa::database(
+ base_db::SourceDatabaseExtStorage,
+ base_db::SourceDatabaseStorage,
+ crate::db::AstDatabaseStorage
+)]
+#[derive(Default)]
+pub struct TestDB {
+ storage: salsa::Storage<TestDB>,
+ events: Mutex<Option<Vec<salsa::Event>>>,
+}
+
+impl fmt::Debug for TestDB {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("TestDB").finish()
+ }
+}
+
+impl salsa::Database for TestDB {
+ fn salsa_event(&self, event: salsa::Event) {
+ let mut events = self.events.lock().unwrap();
+ if let Some(events) = &mut *events {
+ events.push(event);
+ }
+ }
+}
+
+impl panic::RefUnwindSafe for TestDB {}
+
+impl FileLoader for TestDB {
+ fn file_text(&self, file_id: FileId) -> Arc<String> {
+ FileLoaderDelegate(self).file_text(file_id)
+ }
+ fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
+ FileLoaderDelegate(self).resolve_path(anchor, path)
+ }
+ fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+ FileLoaderDelegate(self).relevant_crates(file_id)
+ }
+}
syntax = { path = "../syntax" }
base_db = { path = "../base_db" }
profile = { path = "../profile" }
-hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
+hir_expand = { path = "../hir_expand" }
hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" }
impl MacroDef {
/// FIXME: right now, this just returns the root module of the crate that
/// defines this macro. The reasons for this is that macros are expanded
- /// early, in `ra_hir_expand`, where modules simply do not exist yet.
+ /// early, in `hir_expand`, where modules simply do not exist yet.
pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
let krate = self.id.krate?;
let module_id = db.crate_def_map(krate).root;
smallvec = "1.4.0"
stdx = { path = "../stdx" }
-
arena = { path = "../arena" }
base_db = { path = "../base_db" }
syntax = { path = "../syntax" }
profile = { path = "../profile" }
-hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
+hir_expand = { path = "../hir_expand" }
test_utils = { path = "../test_utils" }
mbe = { path = "../mbe" }
cfg = { path = "../cfg" }
+++ /dev/null
-[package]
-edition = "2018"
-name = "ra_hir_expand"
-version = "0.1.0"
-authors = ["rust-analyzer developers"]
-license = "MIT OR Apache-2.0"
-
-[lib]
-doctest = false
-
-[dependencies]
-log = "0.4.8"
-either = "1.5.3"
-rustc-hash = "1.0.0"
-
-arena = { path = "../arena" }
-base_db = { path = "../base_db" }
-syntax = { path = "../syntax" }
-parser = { path = "../parser" }
-profile = { path = "../profile" }
-tt = { path = "../tt" }
-mbe = { path = "../mbe" }
-test_utils = { path = "../test_utils"}
+++ /dev/null
-//! `AstIdMap` allows to create stable IDs for "large" syntax nodes like items
-//! and macro calls.
-//!
-//! Specifically, it enumerates all items in a file and uses position of a an
-//! item as an ID. That way, id's don't change unless the set of items itself
-//! changes.
-
-use std::{
- any::type_name,
- fmt,
- hash::{Hash, Hasher},
- marker::PhantomData,
-};
-
-use arena::{Arena, Idx};
-use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
-
-/// `AstId` points to an AST node in a specific file.
-pub struct FileAstId<N: AstNode> {
- raw: ErasedFileAstId,
- _ty: PhantomData<fn() -> N>,
-}
-
-impl<N: AstNode> Clone for FileAstId<N> {
- fn clone(&self) -> FileAstId<N> {
- *self
- }
-}
-impl<N: AstNode> Copy for FileAstId<N> {}
-
-impl<N: AstNode> PartialEq for FileAstId<N> {
- fn eq(&self, other: &Self) -> bool {
- self.raw == other.raw
- }
-}
-impl<N: AstNode> Eq for FileAstId<N> {}
-impl<N: AstNode> Hash for FileAstId<N> {
- fn hash<H: Hasher>(&self, hasher: &mut H) {
- self.raw.hash(hasher);
- }
-}
-
-impl<N: AstNode> fmt::Debug for FileAstId<N> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
- }
-}
-
-impl<N: AstNode> FileAstId<N> {
- // Can't make this a From implementation because of coherence
- pub fn upcast<M: AstNode>(self) -> FileAstId<M>
- where
- N: Into<M>,
- {
- FileAstId { raw: self.raw, _ty: PhantomData }
- }
-}
-
-type ErasedFileAstId = Idx<SyntaxNodePtr>;
-
-/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
-#[derive(Debug, PartialEq, Eq, Default)]
-pub struct AstIdMap {
- arena: Arena<SyntaxNodePtr>,
-}
-
-impl AstIdMap {
- pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
- assert!(node.parent().is_none());
- let mut res = AstIdMap { arena: Arena::default() };
- // By walking the tree in breadth-first order we make sure that parents
- // get lower ids then children. That is, adding a new child does not
- // change parent's id. This means that, say, adding a new function to a
- // trait does not change ids of top-level items, which helps caching.
- bfs(node, |it| {
- if let Some(module_item) = ast::Item::cast(it) {
- res.alloc(module_item.syntax());
- }
- });
- res
- }
-
- pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
- let raw = self.erased_ast_id(item.syntax());
- FileAstId { raw, _ty: PhantomData }
- }
- fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
- let ptr = SyntaxNodePtr::new(item);
- match self.arena.iter().find(|(_id, i)| **i == ptr) {
- Some((it, _)) => it,
- None => panic!(
- "Can't find {:?} in AstIdMap:\n{:?}",
- item,
- self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
- ),
- }
- }
-
- pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
- self.arena[id.raw].clone().cast::<N>().unwrap()
- }
-
- fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
- self.arena.alloc(SyntaxNodePtr::new(item))
- }
-}
-
-/// Walks the subtree in bfs order, calling `f` for each node.
-fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) {
- let mut curr_layer = vec![node.clone()];
- let mut next_layer = vec![];
- while !curr_layer.is_empty() {
- curr_layer.drain(..).for_each(|node| {
- next_layer.extend(node.children());
- f(node);
- });
- std::mem::swap(&mut curr_layer, &mut next_layer);
- }
-}
+++ /dev/null
-//! Builtin derives.
-
-use log::debug;
-
-use parser::FragmentKind;
-use syntax::{
- ast::{self, AstNode, GenericParamsOwner, ModuleItemOwner, NameOwner},
- match_ast,
-};
-
-use crate::{db::AstDatabase, name, quote, LazyMacroId, MacroDefId, MacroDefKind};
-
-macro_rules! register_builtin {
- ( $($trait:ident => $expand:ident),* ) => {
- #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
- pub enum BuiltinDeriveExpander {
- $($trait),*
- }
-
- impl BuiltinDeriveExpander {
- pub fn expand(
- &self,
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
- ) -> Result<tt::Subtree, mbe::ExpandError> {
- let expander = match *self {
- $( BuiltinDeriveExpander::$trait => $expand, )*
- };
- expander(db, id, tt)
- }
- }
-
- pub fn find_builtin_derive(ident: &name::Name) -> Option<MacroDefId> {
- let kind = match ident {
- $( id if id == &name::name![$trait] => BuiltinDeriveExpander::$trait, )*
- _ => return None,
- };
-
- Some(MacroDefId { krate: None, ast_id: None, kind: MacroDefKind::BuiltInDerive(kind), local_inner: false })
- }
- };
-}
-
-register_builtin! {
- Copy => copy_expand,
- Clone => clone_expand,
- Default => default_expand,
- Debug => debug_expand,
- Hash => hash_expand,
- Ord => ord_expand,
- PartialOrd => partial_ord_expand,
- Eq => eq_expand,
- PartialEq => partial_eq_expand
-}
-
-struct BasicAdtInfo {
- name: tt::Ident,
- type_params: usize,
-}
-
-fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
- let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, FragmentKind::Items)?; // FragmentKind::Items doesn't parse attrs?
- let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
- debug!("derive node didn't parse");
- mbe::ExpandError::UnexpectedToken
- })?;
- let item = macro_items.items().next().ok_or_else(|| {
- debug!("no module item parsed");
- mbe::ExpandError::NoMatchingRule
- })?;
- let node = item.syntax();
- let (name, params) = match_ast! {
- match node {
- ast::Struct(it) => (it.name(), it.generic_param_list()),
- ast::Enum(it) => (it.name(), it.generic_param_list()),
- ast::Union(it) => (it.name(), it.generic_param_list()),
- _ => {
- debug!("unexpected node is {:?}", node);
- return Err(mbe::ExpandError::ConversionError)
- },
- }
- };
- let name = name.ok_or_else(|| {
- debug!("parsed item has no name");
- mbe::ExpandError::NoMatchingRule
- })?;
- let name_token_id = token_map.token_by_range(name.syntax().text_range()).ok_or_else(|| {
- debug!("name token not found");
- mbe::ExpandError::ConversionError
- })?;
- let name_token = tt::Ident { id: name_token_id, text: name.text().clone() };
- let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
- Ok(BasicAdtInfo { name: name_token, type_params })
-}
-
-fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
- let mut result = Vec::<tt::TokenTree>::new();
- result.push(
- tt::Leaf::Punct(tt::Punct {
- char: '<',
- spacing: tt::Spacing::Alone,
- id: tt::TokenId::unspecified(),
- })
- .into(),
- );
- for i in 0..n {
- if i > 0 {
- result.push(
- tt::Leaf::Punct(tt::Punct {
- char: ',',
- spacing: tt::Spacing::Alone,
- id: tt::TokenId::unspecified(),
- })
- .into(),
- );
- }
- result.push(
- tt::Leaf::Ident(tt::Ident {
- id: tt::TokenId::unspecified(),
- text: format!("T{}", i).into(),
- })
- .into(),
- );
- result.extend(bound.iter().cloned());
- }
- result.push(
- tt::Leaf::Punct(tt::Punct {
- char: '>',
- spacing: tt::Spacing::Alone,
- id: tt::TokenId::unspecified(),
- })
- .into(),
- );
- result
-}
-
-fn expand_simple_derive(
- tt: &tt::Subtree,
- trait_path: tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let info = parse_adt(tt)?;
- let name = info.name;
- let trait_path_clone = trait_path.token_trees.clone();
- let bound = (quote! { : ##trait_path_clone }).token_trees;
- let type_params = make_type_args(info.type_params, bound);
- let type_args = make_type_args(info.type_params, Vec::new());
- let trait_path = trait_path.token_trees;
- let expanded = quote! {
- impl ##type_params ##trait_path for #name ##type_args {}
- };
- Ok(expanded)
-}
-
-fn find_builtin_crate(db: &dyn AstDatabase, id: LazyMacroId) -> tt::TokenTree {
- // FIXME: make hygiene works for builtin derive macro
- // such that $crate can be used here.
- let cg = db.crate_graph();
- let krate = db.lookup_intern_macro(id).krate;
-
- // XXX
- // All crates except core itself should have a dependency on core,
- // We detect `core` by seeing whether it doesn't have such a dependency.
- let tt = if cg[krate].dependencies.iter().any(|dep| &*dep.name == "core") {
- quote! { core }
- } else {
- quote! { crate }
- };
-
- tt.token_trees[0].clone()
-}
-
-fn copy_expand(
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::marker::Copy })
-}
-
-fn clone_expand(
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::clone::Clone })
-}
-
-fn default_expand(
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::default::Default })
-}
-
-fn debug_expand(
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::fmt::Debug })
-}
-
-fn hash_expand(
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::hash::Hash })
-}
-
-fn eq_expand(
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::Eq })
-}
-
-fn partial_eq_expand(
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::PartialEq })
-}
-
-fn ord_expand(
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::Ord })
-}
-
-fn partial_ord_expand(
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd })
-}
-
-#[cfg(test)]
-mod tests {
- use base_db::{fixture::WithFixture, CrateId, SourceDatabase};
- use name::{known, Name};
-
- use crate::{test_db::TestDB, AstId, MacroCallId, MacroCallKind, MacroCallLoc};
-
- use super::*;
-
- fn expand_builtin_derive(s: &str, name: Name) -> String {
- let def = find_builtin_derive(&name).unwrap();
- let fixture = format!(
- r#"//- /main.rs crate:main deps:core
-<|>
-{}
-//- /lib.rs crate:core
-// empty
-"#,
- s
- );
-
- let (db, file_pos) = TestDB::with_position(&fixture);
- let file_id = file_pos.file_id;
- let parsed = db.parse(file_id);
- let items: Vec<_> =
- parsed.syntax_node().descendants().filter_map(ast::Item::cast).collect();
-
- let ast_id_map = db.ast_id_map(file_id.into());
-
- let attr_id = AstId::new(file_id.into(), ast_id_map.ast_id(&items[0]));
-
- let loc = MacroCallLoc {
- def,
- krate: CrateId(0),
- kind: MacroCallKind::Attr(attr_id, name.to_string()),
- };
-
- let id: MacroCallId = db.intern_macro(loc).into();
- let parsed = db.parse_or_expand(id.as_file()).unwrap();
-
- // FIXME text() for syntax nodes parsed from token tree looks weird
- // because there's no whitespace, see below
- parsed.text().to_string()
- }
-
- #[test]
- fn test_copy_expand_simple() {
- let expanded = expand_builtin_derive(
- r#"
- #[derive(Copy)]
- struct Foo;
-"#,
- known::Copy,
- );
-
- assert_eq!(expanded, "impl< >core::marker::CopyforFoo< >{}");
- }
-
- #[test]
- fn test_copy_expand_with_type_params() {
- let expanded = expand_builtin_derive(
- r#"
- #[derive(Copy)]
- struct Foo<A, B>;
-"#,
- known::Copy,
- );
-
- assert_eq!(
- expanded,
- "impl<T0:core::marker::Copy,T1:core::marker::Copy>core::marker::CopyforFoo<T0,T1>{}"
- );
- }
-
- #[test]
- fn test_copy_expand_with_lifetimes() {
- let expanded = expand_builtin_derive(
- r#"
- #[derive(Copy)]
- struct Foo<A, B, 'a, 'b>;
-"#,
- known::Copy,
- );
-
- // We currently just ignore lifetimes
-
- assert_eq!(
- expanded,
- "impl<T0:core::marker::Copy,T1:core::marker::Copy>core::marker::CopyforFoo<T0,T1>{}"
- );
- }
-
- #[test]
- fn test_clone_expand() {
- let expanded = expand_builtin_derive(
- r#"
- #[derive(Clone)]
- struct Foo<A, B>;
-"#,
- known::Clone,
- );
-
- assert_eq!(
- expanded,
- "impl<T0:core::clone::Clone,T1:core::clone::Clone>core::clone::CloneforFoo<T0,T1>{}"
- );
- }
-}
+++ /dev/null
-//! Builtin macro
-use crate::{
- db::AstDatabase, name, quote, AstId, CrateId, EagerMacroId, LazyMacroId, MacroCallId,
- MacroDefId, MacroDefKind, TextSize,
-};
-
-use base_db::FileId;
-use either::Either;
-use mbe::parse_to_token_tree;
-use parser::FragmentKind;
-use syntax::ast::{self, AstToken, HasStringValue};
-
-macro_rules! register_builtin {
- ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => {
- #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
- pub enum BuiltinFnLikeExpander {
- $($kind),*
- }
-
- #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
- pub enum EagerExpander {
- $($e_kind),*
- }
-
- impl BuiltinFnLikeExpander {
- pub fn expand(
- &self,
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
- ) -> Result<tt::Subtree, mbe::ExpandError> {
- let expander = match *self {
- $( BuiltinFnLikeExpander::$kind => $expand, )*
- };
- expander(db, id, tt)
- }
- }
-
- impl EagerExpander {
- pub fn expand(
- &self,
- db: &dyn AstDatabase,
- arg_id: EagerMacroId,
- tt: &tt::Subtree,
- ) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
- let expander = match *self {
- $( EagerExpander::$e_kind => $e_expand, )*
- };
- expander(db,arg_id,tt)
- }
- }
-
- fn find_by_name(ident: &name::Name) -> Option<Either<BuiltinFnLikeExpander, EagerExpander>> {
- match ident {
- $( id if id == &name::name![$name] => Some(Either::Left(BuiltinFnLikeExpander::$kind)), )*
- $( id if id == &name::name![$e_name] => Some(Either::Right(EagerExpander::$e_kind)), )*
- _ => return None,
- }
- }
- };
-}
-
-pub fn find_builtin_macro(
- ident: &name::Name,
- krate: CrateId,
- ast_id: AstId<ast::MacroCall>,
-) -> Option<MacroDefId> {
- let kind = find_by_name(ident)?;
-
- match kind {
- Either::Left(kind) => Some(MacroDefId {
- krate: Some(krate),
- ast_id: Some(ast_id),
- kind: MacroDefKind::BuiltIn(kind),
- local_inner: false,
- }),
- Either::Right(kind) => Some(MacroDefId {
- krate: Some(krate),
- ast_id: Some(ast_id),
- kind: MacroDefKind::BuiltInEager(kind),
- local_inner: false,
- }),
- }
-}
-
-register_builtin! {
- LAZY:
- (column, Column) => column_expand,
- (compile_error, CompileError) => compile_error_expand,
- (file, File) => file_expand,
- (line, Line) => line_expand,
- (assert, Assert) => assert_expand,
- (stringify, Stringify) => stringify_expand,
- (format_args, FormatArgs) => format_args_expand,
- // format_args_nl only differs in that it adds a newline in the end,
- // so we use the same stub expansion for now
- (format_args_nl, FormatArgsNl) => format_args_expand,
-
- EAGER:
- (concat, Concat) => concat_expand,
- (include, Include) => include_expand,
- (include_bytes, IncludeBytes) => include_bytes_expand,
- (include_str, IncludeStr) => include_str_expand,
- (env, Env) => env_expand,
- (option_env, OptionEnv) => option_env_expand
-}
-
-fn line_expand(
- _db: &dyn AstDatabase,
- _id: LazyMacroId,
- _tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- // dummy implementation for type-checking purposes
- let line_num = 0;
- let expanded = quote! {
- #line_num
- };
-
- Ok(expanded)
-}
-
-fn stringify_expand(
- db: &dyn AstDatabase,
- id: LazyMacroId,
- _tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let loc = db.lookup_intern_macro(id);
-
- let macro_content = {
- let arg = loc.kind.arg(db).ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
- let macro_args = arg;
- let text = macro_args.text();
- let without_parens = TextSize::of('(')..text.len() - TextSize::of(')');
- text.slice(without_parens).to_string()
- };
-
- let expanded = quote! {
- #macro_content
- };
-
- Ok(expanded)
-}
-
-fn column_expand(
- _db: &dyn AstDatabase,
- _id: LazyMacroId,
- _tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- // dummy implementation for type-checking purposes
- let col_num = 0;
- let expanded = quote! {
- #col_num
- };
-
- Ok(expanded)
-}
-
-fn assert_expand(
- _db: &dyn AstDatabase,
- _id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- // A hacky implementation for goto def and hover
- // We expand `assert!(cond, arg1, arg2)` to
- // ```
- // {(cond, &(arg1), &(arg2));}
- // ```,
- // which is wrong but useful.
-
- let mut args = Vec::new();
- let mut current = Vec::new();
- for tt in tt.token_trees.iter().cloned() {
- match tt {
- tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => {
- args.push(current);
- current = Vec::new();
- }
- _ => {
- current.push(tt);
- }
- }
- }
- if !current.is_empty() {
- args.push(current);
- }
-
- let arg_tts = args.into_iter().flat_map(|arg| {
- quote! { &(##arg), }
- }.token_trees).collect::<Vec<_>>();
-
- let expanded = quote! {
- { { (##arg_tts); } }
- };
- Ok(expanded)
-}
-
-fn file_expand(
- _db: &dyn AstDatabase,
- _id: LazyMacroId,
- _tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- // FIXME: RA purposefully lacks knowledge of absolute file names
- // so just return "".
- let file_name = "";
-
- let expanded = quote! {
- #file_name
- };
-
- Ok(expanded)
-}
-
-fn compile_error_expand(
- _db: &dyn AstDatabase,
- _id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- if tt.count() == 1 {
- if let tt::TokenTree::Leaf(tt::Leaf::Literal(it)) = &tt.token_trees[0] {
- let s = it.text.as_str();
- if s.contains('"') {
- return Ok(quote! { loop { #it }});
- }
- };
- }
-
- Err(mbe::ExpandError::BindingError("Must be a string".into()))
-}
-
-fn format_args_expand(
- _db: &dyn AstDatabase,
- _id: LazyMacroId,
- tt: &tt::Subtree,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- // We expand `format_args!("", a1, a2)` to
- // ```
- // std::fmt::Arguments::new_v1(&[], &[
- // std::fmt::ArgumentV1::new(&arg1,std::fmt::Display::fmt),
- // std::fmt::ArgumentV1::new(&arg2,std::fmt::Display::fmt),
- // ])
- // ```,
- // which is still not really correct, but close enough for now
- let mut args = Vec::new();
- let mut current = Vec::new();
- for tt in tt.token_trees.iter().cloned() {
- match tt {
- tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => {
- args.push(current);
- current = Vec::new();
- }
- _ => {
- current.push(tt);
- }
- }
- }
- if !current.is_empty() {
- args.push(current);
- }
- if args.is_empty() {
- return Err(mbe::ExpandError::NoMatchingRule);
- }
- let _format_string = args.remove(0);
- let arg_tts = args.into_iter().flat_map(|arg| {
- quote! { std::fmt::ArgumentV1::new(&(##arg), std::fmt::Display::fmt), }
- }.token_trees).collect::<Vec<_>>();
- let expanded = quote! {
- std::fmt::Arguments::new_v1(&[], &[##arg_tts])
- };
- Ok(expanded)
-}
-
-fn unquote_str(lit: &tt::Literal) -> Option<String> {
- let lit = ast::make::tokens::literal(&lit.to_string());
- let token = ast::String::cast(lit)?;
- token.value().map(|it| it.into_owned())
-}
-
-fn concat_expand(
- _db: &dyn AstDatabase,
- _arg_id: EagerMacroId,
- tt: &tt::Subtree,
-) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
- let mut text = String::new();
- for (i, t) in tt.token_trees.iter().enumerate() {
- match t {
- tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => {
- text += &unquote_str(&it).ok_or_else(|| mbe::ExpandError::ConversionError)?;
- }
- tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
- _ => return Err(mbe::ExpandError::UnexpectedToken),
- }
- }
-
- Ok((quote!(#text), FragmentKind::Expr))
-}
-
-fn relative_file(
- db: &dyn AstDatabase,
- call_id: MacroCallId,
- path: &str,
- allow_recursion: bool,
-) -> Option<FileId> {
- let call_site = call_id.as_file().original_file(db);
- let res = db.resolve_path(call_site, path)?;
- // Prevent include itself
- if res == call_site && !allow_recursion {
- None
- } else {
- Some(res)
- }
-}
-
-fn parse_string(tt: &tt::Subtree) -> Result<String, mbe::ExpandError> {
- tt.token_trees
- .get(0)
- .and_then(|tt| match tt {
- tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it),
- _ => None,
- })
- .ok_or_else(|| mbe::ExpandError::ConversionError)
-}
-
-fn include_expand(
- db: &dyn AstDatabase,
- arg_id: EagerMacroId,
- tt: &tt::Subtree,
-) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
- let path = parse_string(tt)?;
- let file_id = relative_file(db, arg_id.into(), &path, false)
- .ok_or_else(|| mbe::ExpandError::ConversionError)?;
-
- // FIXME:
- // Handle include as expression
- let res = parse_to_token_tree(&db.file_text(file_id))
- .ok_or_else(|| mbe::ExpandError::ConversionError)?
- .0;
-
- Ok((res, FragmentKind::Items))
-}
-
-fn include_bytes_expand(
- _db: &dyn AstDatabase,
- _arg_id: EagerMacroId,
- tt: &tt::Subtree,
-) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
- let _path = parse_string(tt)?;
-
- // FIXME: actually read the file here if the user asked for macro expansion
- let res = tt::Subtree {
- delimiter: None,
- token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
- text: r#"b"""#.into(),
- id: tt::TokenId::unspecified(),
- }))],
- };
- Ok((res, FragmentKind::Expr))
-}
-
-fn include_str_expand(
- db: &dyn AstDatabase,
- arg_id: EagerMacroId,
- tt: &tt::Subtree,
-) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
- let path = parse_string(tt)?;
-
- // FIXME: we're not able to read excluded files (which is most of them because
- // it's unusual to `include_str!` a Rust file), but we can return an empty string.
- // Ideally, we'd be able to offer a precise expansion if the user asks for macro
- // expansion.
- let file_id = match relative_file(db, arg_id.into(), &path, true) {
- Some(file_id) => file_id,
- None => {
- return Ok((quote!(""), FragmentKind::Expr));
- }
- };
-
- let text = db.file_text(file_id);
- let text = &*text;
-
- Ok((quote!(#text), FragmentKind::Expr))
-}
-
-fn get_env_inner(db: &dyn AstDatabase, arg_id: EagerMacroId, key: &str) -> Option<String> {
- let krate = db.lookup_intern_eager_expansion(arg_id).krate;
- db.crate_graph()[krate].env.get(key)
-}
-
-fn env_expand(
- db: &dyn AstDatabase,
- arg_id: EagerMacroId,
- tt: &tt::Subtree,
-) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
- let key = parse_string(tt)?;
-
- // FIXME:
- // If the environment variable is not defined int rustc, then a compilation error will be emitted.
- // We might do the same if we fully support all other stuffs.
- // But for now on, we should return some dummy string for better type infer purpose.
- // However, we cannot use an empty string here, because for
- // `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` will become
- // `include!("foo.rs"), which might go to infinite loop
- let s = get_env_inner(db, arg_id, &key).unwrap_or_else(|| "__RA_UNIMPLEMENTED__".to_string());
- let expanded = quote! { #s };
-
- Ok((expanded, FragmentKind::Expr))
-}
-
-fn option_env_expand(
- db: &dyn AstDatabase,
- arg_id: EagerMacroId,
- tt: &tt::Subtree,
-) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
- let key = parse_string(tt)?;
- let expanded = match get_env_inner(db, arg_id, &key) {
- None => quote! { std::option::Option::None::<&str> },
- Some(s) => quote! { std::option::Some(#s) },
- };
-
- Ok((expanded, FragmentKind::Expr))
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use crate::{
- name::AsName, test_db::TestDB, AstNode, EagerCallLoc, MacroCallId, MacroCallKind,
- MacroCallLoc,
- };
- use base_db::{fixture::WithFixture, SourceDatabase};
- use std::sync::Arc;
- use syntax::ast::NameOwner;
-
- fn expand_builtin_macro(ra_fixture: &str) -> String {
- let (db, file_id) = TestDB::with_single_file(&ra_fixture);
- let parsed = db.parse(file_id);
- let macro_calls: Vec<_> =
- parsed.syntax_node().descendants().filter_map(ast::MacroCall::cast).collect();
-
- let ast_id_map = db.ast_id_map(file_id.into());
-
- let expander = find_by_name(¯o_calls[0].name().unwrap().as_name()).unwrap();
-
- let krate = CrateId(0);
- let file_id = match expander {
- Either::Left(expander) => {
- // the first one should be a macro_rules
- let def = MacroDefId {
- krate: Some(CrateId(0)),
- ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(¯o_calls[0]))),
- kind: MacroDefKind::BuiltIn(expander),
- local_inner: false,
- };
-
- let loc = MacroCallLoc {
- def,
- krate,
- kind: MacroCallKind::FnLike(AstId::new(
- file_id.into(),
- ast_id_map.ast_id(¯o_calls[1]),
- )),
- };
-
- let id: MacroCallId = db.intern_macro(loc).into();
- id.as_file()
- }
- Either::Right(expander) => {
- // the first one should be a macro_rules
- let def = MacroDefId {
- krate: Some(krate),
- ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(¯o_calls[0]))),
- kind: MacroDefKind::BuiltInEager(expander),
- local_inner: false,
- };
-
- let args = macro_calls[1].token_tree().unwrap();
- let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0;
-
- let arg_id = db.intern_eager_expansion({
- EagerCallLoc {
- def,
- fragment: FragmentKind::Expr,
- subtree: Arc::new(parsed_args.clone()),
- krate,
- file_id: file_id.into(),
- }
- });
-
- let (subtree, fragment) = expander.expand(&db, arg_id, &parsed_args).unwrap();
- let eager = EagerCallLoc {
- def,
- fragment,
- subtree: Arc::new(subtree),
- krate,
- file_id: file_id.into(),
- };
-
- let id: MacroCallId = db.intern_eager_expansion(eager).into();
- id.as_file()
- }
- };
-
- db.parse_or_expand(file_id).unwrap().to_string()
- }
-
- #[test]
- fn test_column_expand() {
- let expanded = expand_builtin_macro(
- r#"
- #[rustc_builtin_macro]
- macro_rules! column {() => {}}
- column!()
- "#,
- );
-
- assert_eq!(expanded, "0");
- }
-
- #[test]
- fn test_line_expand() {
- let expanded = expand_builtin_macro(
- r#"
- #[rustc_builtin_macro]
- macro_rules! line {() => {}}
- line!()
- "#,
- );
-
- assert_eq!(expanded, "0");
- }
-
- #[test]
- fn test_stringify_expand() {
- let expanded = expand_builtin_macro(
- r#"
- #[rustc_builtin_macro]
- macro_rules! stringify {() => {}}
- stringify!(a b c)
- "#,
- );
-
- assert_eq!(expanded, "\"a b c\"");
- }
-
- #[test]
- fn test_env_expand() {
- let expanded = expand_builtin_macro(
- r#"
- #[rustc_builtin_macro]
- macro_rules! env {() => {}}
- env!("TEST_ENV_VAR")
- "#,
- );
-
- assert_eq!(expanded, "\"__RA_UNIMPLEMENTED__\"");
- }
-
- #[test]
- fn test_option_env_expand() {
- let expanded = expand_builtin_macro(
- r#"
- #[rustc_builtin_macro]
- macro_rules! option_env {() => {}}
- option_env!("TEST_ENV_VAR")
- "#,
- );
-
- assert_eq!(expanded, "std::option::Option::None:: < &str>");
- }
-
- #[test]
- fn test_file_expand() {
- let expanded = expand_builtin_macro(
- r#"
- #[rustc_builtin_macro]
- macro_rules! file {() => {}}
- file!()
- "#,
- );
-
- assert_eq!(expanded, "\"\"");
- }
-
- #[test]
- fn test_assert_expand() {
- let expanded = expand_builtin_macro(
- r#"
- #[rustc_builtin_macro]
- macro_rules! assert {
- ($cond:expr) => ({ /* compiler built-in */ });
- ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ })
- }
- assert!(true, "{} {:?}", arg1(a, b, c), arg2);
- "#,
- );
-
- assert_eq!(expanded, "{{(&(true), &(\"{} {:?}\"), &(arg1(a,b,c)), &(arg2),);}}");
- }
-
- #[test]
- fn test_compile_error_expand() {
- let expanded = expand_builtin_macro(
- r#"
- #[rustc_builtin_macro]
- macro_rules! compile_error {
- ($msg:expr) => ({ /* compiler built-in */ });
- ($msg:expr,) => ({ /* compiler built-in */ })
- }
- compile_error!("error!");
- "#,
- );
-
- assert_eq!(expanded, r#"loop{"error!"}"#);
- }
-
- #[test]
- fn test_format_args_expand() {
- let expanded = expand_builtin_macro(
- r#"
- #[rustc_builtin_macro]
- macro_rules! format_args {
- ($fmt:expr) => ({ /* compiler built-in */ });
- ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
- }
- format_args!("{} {:?}", arg1(a, b, c), arg2);
- "#,
- );
-
- assert_eq!(
- expanded,
- r#"std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(arg1(a,b,c)),std::fmt::Display::fmt),std::fmt::ArgumentV1::new(&(arg2),std::fmt::Display::fmt),])"#
- );
- }
-
- #[test]
- fn test_include_bytes_expand() {
- let expanded = expand_builtin_macro(
- r#"
- #[rustc_builtin_macro]
- macro_rules! include_bytes {
- ($file:expr) => {{ /* compiler built-in */ }};
- ($file:expr,) => {{ /* compiler built-in */ }};
- }
- include_bytes("foo");
- "#,
- );
-
- assert_eq!(expanded, r#"b"""#);
- }
-}
+++ /dev/null
-//! Defines database & queries for macro expansion.
-
-use std::sync::Arc;
-
-use base_db::{salsa, SourceDatabase};
-use mbe::{ExpandResult, MacroRules};
-use parser::FragmentKind;
-use syntax::{algo::diff, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode};
-
-use crate::{
- ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId,
- HirFileId, HirFileIdRepr, LazyMacroId, MacroCallId, MacroCallLoc, MacroDefId, MacroDefKind,
- MacroFile, ProcMacroExpander,
-};
-
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub enum TokenExpander {
- MacroRules(mbe::MacroRules),
- Builtin(BuiltinFnLikeExpander),
- BuiltinDerive(BuiltinDeriveExpander),
- ProcMacro(ProcMacroExpander),
-}
-
-impl TokenExpander {
- pub fn expand(
- &self,
- db: &dyn AstDatabase,
- id: LazyMacroId,
- tt: &tt::Subtree,
- ) -> mbe::ExpandResult<tt::Subtree> {
- match self {
- TokenExpander::MacroRules(it) => it.expand(tt),
- // FIXME switch these to ExpandResult as well
- TokenExpander::Builtin(it) => it.expand(db, id, tt).into(),
- TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(),
- TokenExpander::ProcMacro(_) => {
- // We store the result in salsa db to prevent non-determinisc behavior in
- // some proc-macro implementation
- // See #4315 for details
- db.expand_proc_macro(id.into()).into()
- }
- }
- }
-
- pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
- match self {
- TokenExpander::MacroRules(it) => it.map_id_down(id),
- TokenExpander::Builtin(..) => id,
- TokenExpander::BuiltinDerive(..) => id,
- TokenExpander::ProcMacro(..) => id,
- }
- }
-
- pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
- match self {
- TokenExpander::MacroRules(it) => it.map_id_up(id),
- TokenExpander::Builtin(..) => (id, mbe::Origin::Call),
- TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call),
- TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
- }
- }
-}
-
-// FIXME: rename to ExpandDatabase
-#[salsa::query_group(AstDatabaseStorage)]
-pub trait AstDatabase: SourceDatabase {
- fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
-
- #[salsa::transparent]
- fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
-
- #[salsa::interned]
- fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId;
- fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
- #[salsa::transparent]
- fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
- fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>;
- fn parse_macro(&self, macro_file: MacroFile)
- -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
- fn macro_expand(&self, macro_call: MacroCallId) -> (Option<Arc<tt::Subtree>>, Option<String>);
-
- #[salsa::interned]
- fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId;
-
- fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>;
-}
-
-/// This expands the given macro call, but with different arguments. This is
-/// used for completion, where we want to see what 'would happen' if we insert a
-/// token. The `token_to_map` mapped down into the expansion, with the mapped
-/// token returned.
-pub fn expand_hypothetical(
- db: &dyn AstDatabase,
- actual_macro_call: MacroCallId,
- hypothetical_args: &syntax::ast::TokenTree,
- token_to_map: syntax::SyntaxToken,
-) -> Option<(SyntaxNode, syntax::SyntaxToken)> {
- let macro_file = MacroFile { macro_call_id: actual_macro_call };
- let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap();
- let range =
- token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
- let token_id = tmap_1.token_by_range(range)?;
- let macro_def = expander(db, actual_macro_call)?;
- let (node, tmap_2) =
- parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1))))?;
- let token_id = macro_def.0.map_id_down(token_id);
- let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
- let token = syntax::algo::find_covering_element(&node.syntax_node(), range).into_token()?;
- Some((node.syntax_node(), token))
-}
-
-pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
- let map =
- db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it));
- Arc::new(map)
-}
-
-pub(crate) fn macro_def(
- db: &dyn AstDatabase,
- id: MacroDefId,
-) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
- match id.kind {
- MacroDefKind::Declarative => {
- let macro_call = id.ast_id?.to_node(db);
- let arg = macro_call.token_tree()?;
- let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
- log::warn!("fail on macro_def to token tree: {:#?}", arg);
- None
- })?;
- let rules = match MacroRules::parse(&tt) {
- Ok(it) => it,
- Err(err) => {
- log::warn!("fail on macro_def parse: error: {:#?} {:#?}", err, tt);
- return None;
- }
- };
- Some(Arc::new((TokenExpander::MacroRules(rules), tmap)))
- }
- MacroDefKind::BuiltIn(expander) => {
- Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default())))
- }
- MacroDefKind::BuiltInDerive(expander) => {
- Some(Arc::new((TokenExpander::BuiltinDerive(expander), mbe::TokenMap::default())))
- }
- MacroDefKind::BuiltInEager(_) => None,
- MacroDefKind::CustomDerive(expander) => {
- Some(Arc::new((TokenExpander::ProcMacro(expander), mbe::TokenMap::default())))
- }
- }
-}
-
-pub(crate) fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
- let id = match id {
- MacroCallId::LazyMacro(id) => id,
- MacroCallId::EagerMacro(_id) => {
- // FIXME: support macro_arg for eager macro
- return None;
- }
- };
- let loc = db.lookup_intern_macro(id);
- let arg = loc.kind.arg(db)?;
- Some(arg.green().clone())
-}
-
-pub(crate) fn macro_arg(
- db: &dyn AstDatabase,
- id: MacroCallId,
-) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
- let arg = db.macro_arg_text(id)?;
- let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?;
- Some(Arc::new((tt, tmap)))
-}
-
-pub(crate) fn macro_expand(
- db: &dyn AstDatabase,
- id: MacroCallId,
-) -> (Option<Arc<tt::Subtree>>, Option<String>) {
- macro_expand_with_arg(db, id, None)
-}
-
-fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
- let lazy_id = match id {
- MacroCallId::LazyMacro(id) => id,
- MacroCallId::EagerMacro(_id) => {
- return None;
- }
- };
-
- let loc = db.lookup_intern_macro(lazy_id);
- let macro_rules = db.macro_def(loc.def)?;
- Some(macro_rules)
-}
-
-fn macro_expand_with_arg(
- db: &dyn AstDatabase,
- id: MacroCallId,
- arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
-) -> (Option<Arc<tt::Subtree>>, Option<String>) {
- let lazy_id = match id {
- MacroCallId::LazyMacro(id) => id,
- MacroCallId::EagerMacro(id) => {
- if arg.is_some() {
- return (
- None,
- Some("hypothetical macro expansion not implemented for eager macro".to_owned()),
- );
- } else {
- return (Some(db.lookup_intern_eager_expansion(id).subtree), None);
- }
- }
- };
-
- let loc = db.lookup_intern_macro(lazy_id);
- let macro_arg = match arg.or_else(|| db.macro_arg(id)) {
- Some(it) => it,
- None => return (None, Some("Fail to args in to tt::TokenTree".into())),
- };
-
- let macro_rules = match db.macro_def(loc.def) {
- Some(it) => it,
- None => return (None, Some("Fail to find macro definition".into())),
- };
- let ExpandResult(tt, err) = macro_rules.0.expand(db, lazy_id, ¯o_arg.0);
- // Set a hard limit for the expanded tt
- let count = tt.count();
- if count > 65536 {
- return (None, Some(format!("Total tokens count exceed limit : count = {}", count)));
- }
- (Some(Arc::new(tt)), err.map(|e| format!("{:?}", e)))
-}
-
-pub(crate) fn expand_proc_macro(
- db: &dyn AstDatabase,
- id: MacroCallId,
-) -> Result<tt::Subtree, mbe::ExpandError> {
- let lazy_id = match id {
- MacroCallId::LazyMacro(id) => id,
- MacroCallId::EagerMacro(_) => unreachable!(),
- };
-
- let loc = db.lookup_intern_macro(lazy_id);
- let macro_arg = match db.macro_arg(id) {
- Some(it) => it,
- None => {
- return Err(
- tt::ExpansionError::Unknown("No arguments for proc-macro".to_string()).into()
- )
- }
- };
-
- let expander = match loc.def.kind {
- MacroDefKind::CustomDerive(expander) => expander,
- _ => unreachable!(),
- };
-
- expander.expand(db, lazy_id, ¯o_arg.0)
-}
-
-pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
- match file_id.0 {
- HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
- HirFileIdRepr::MacroFile(macro_file) => {
- db.parse_macro(macro_file).map(|(it, _)| it.syntax_node())
- }
- }
-}
-
-pub(crate) fn parse_macro(
- db: &dyn AstDatabase,
- macro_file: MacroFile,
-) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
- parse_macro_with_arg(db, macro_file, None)
-}
-
-pub fn parse_macro_with_arg(
- db: &dyn AstDatabase,
- macro_file: MacroFile,
- arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
-) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
- let _p = profile::span("parse_macro_query");
-
- let macro_call_id = macro_file.macro_call_id;
- let (tt, err) = if let Some(arg) = arg {
- macro_expand_with_arg(db, macro_call_id, Some(arg))
- } else {
- db.macro_expand(macro_call_id)
- };
- if let Some(err) = &err {
- // Note:
- // The final goal we would like to make all parse_macro success,
- // such that the following log will not call anyway.
- match macro_call_id {
- MacroCallId::LazyMacro(id) => {
- let loc: MacroCallLoc = db.lookup_intern_macro(id);
- let node = loc.kind.node(db);
-
- // collect parent information for warning log
- let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
- it.file_id.call_node(db)
- })
- .map(|n| format!("{:#}", n.value))
- .collect::<Vec<_>>()
- .join("\n");
-
- log::warn!(
- "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}",
- err,
- node.value,
- parents
- );
- }
- _ => {
- log::warn!("fail on macro_parse: (reason: {})", err);
- }
- }
- };
- let tt = tt?;
-
- let fragment_kind = to_fragment_kind(db, macro_call_id);
-
- let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?;
-
- if err.is_none() {
- Some((parse, Arc::new(rev_token_map)))
- } else {
- // FIXME:
- // In future, we should propagate the actual error with recovery information
- // instead of ignore the error here.
-
- // Safe check for recurisve identity macro
- let node = parse.syntax_node();
- let file: HirFileId = macro_file.into();
- let call_node = file.call_node(db)?;
-
- if !diff(&node, &call_node.value).is_empty() {
- Some((parse, Arc::new(rev_token_map)))
- } else {
- None
- }
- }
-}
-
-/// Given a `MacroCallId`, return what `FragmentKind` it belongs to.
-/// FIXME: Not completed
-fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind {
- let lazy_id = match id {
- MacroCallId::LazyMacro(id) => id,
- MacroCallId::EagerMacro(id) => {
- return db.lookup_intern_eager_expansion(id).fragment;
- }
- };
- let syn = db.lookup_intern_macro(lazy_id).kind.node(db).value;
-
- let parent = match syn.parent() {
- Some(it) => it,
- None => {
- // FIXME:
- // If it is root, which means the parent HirFile
- // MacroKindFile must be non-items
- // return expr now.
- return FragmentKind::Expr;
- }
- };
-
- match parent.kind() {
- MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items,
- ITEM_LIST => FragmentKind::Items,
- LET_STMT => {
- // FIXME: Handle Pattern
- FragmentKind::Expr
- }
- // FIXME: Expand to statements in appropriate positions; HIR lowering needs to handle that
- EXPR_STMT | BLOCK_EXPR => FragmentKind::Expr,
- ARG_LIST => FragmentKind::Expr,
- TRY_EXPR => FragmentKind::Expr,
- TUPLE_EXPR => FragmentKind::Expr,
- PAREN_EXPR => FragmentKind::Expr,
-
- FOR_EXPR => FragmentKind::Expr,
- PATH_EXPR => FragmentKind::Expr,
- CLOSURE_EXPR => FragmentKind::Expr,
- CONDITION => FragmentKind::Expr,
- BREAK_EXPR => FragmentKind::Expr,
- RETURN_EXPR => FragmentKind::Expr,
- MATCH_EXPR => FragmentKind::Expr,
- MATCH_ARM => FragmentKind::Expr,
- MATCH_GUARD => FragmentKind::Expr,
- RECORD_EXPR_FIELD => FragmentKind::Expr,
- CALL_EXPR => FragmentKind::Expr,
- INDEX_EXPR => FragmentKind::Expr,
- METHOD_CALL_EXPR => FragmentKind::Expr,
- AWAIT_EXPR => FragmentKind::Expr,
- CAST_EXPR => FragmentKind::Expr,
- REF_EXPR => FragmentKind::Expr,
- PREFIX_EXPR => FragmentKind::Expr,
- RANGE_EXPR => FragmentKind::Expr,
- BIN_EXPR => FragmentKind::Expr,
- _ => {
- // Unknown , Just guess it is `Items`
- FragmentKind::Items
- }
- }
-}
+++ /dev/null
-//! Semantic errors and warnings.
-//!
-//! The `Diagnostic` trait defines a trait object which can represent any
-//! diagnostic.
-//!
-//! `DiagnosticSink` struct is used as an emitter for diagnostic. When creating
-//! a `DiagnosticSink`, you supply a callback which can react to a `dyn
-//! Diagnostic` or to any concrete diagnostic (downcasting is sued internally).
-//!
-//! Because diagnostics store file offsets, it's a bad idea to store them
-//! directly in salsa. For this reason, every hir subsytem defines it's own
-//! strongly-typed closed set of diagnostics which use hir ids internally, are
-//! stored in salsa and do *not* implement the `Diagnostic` trait. Instead, a
-//! subsystem provides a separate, non-query-based API which can walk all stored
-//! values and transform them into instances of `Diagnostic`.
-
-use std::{any::Any, fmt};
-
-use syntax::SyntaxNodePtr;
-
-use crate::InFile;
-
-pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
- fn message(&self) -> String;
- /// Used in highlighting and related purposes
- fn display_source(&self) -> InFile<SyntaxNodePtr>;
- fn as_any(&self) -> &(dyn Any + Send + 'static);
- fn is_experimental(&self) -> bool {
- false
- }
-}
-
-pub struct DiagnosticSink<'a> {
- callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
- filters: Vec<Box<dyn FnMut(&dyn Diagnostic) -> bool + 'a>>,
- default_callback: Box<dyn FnMut(&dyn Diagnostic) + 'a>,
-}
-
-impl<'a> DiagnosticSink<'a> {
- pub fn push(&mut self, d: impl Diagnostic) {
- let d: &dyn Diagnostic = &d;
- self._push(d);
- }
-
- fn _push(&mut self, d: &dyn Diagnostic) {
- for filter in &mut self.filters {
- if !filter(d) {
- return;
- }
- }
- for cb in &mut self.callbacks {
- match cb(d) {
- Ok(()) => return,
- Err(()) => (),
- }
- }
- (self.default_callback)(d)
- }
-}
-
-pub struct DiagnosticSinkBuilder<'a> {
- callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
- filters: Vec<Box<dyn FnMut(&dyn Diagnostic) -> bool + 'a>>,
-}
-
-impl<'a> DiagnosticSinkBuilder<'a> {
- pub fn new() -> Self {
- Self { callbacks: Vec::new(), filters: Vec::new() }
- }
-
- pub fn filter<F: FnMut(&dyn Diagnostic) -> bool + 'a>(mut self, cb: F) -> Self {
- self.filters.push(Box::new(cb));
- self
- }
-
- pub fn on<D: Diagnostic, F: FnMut(&D) + 'a>(mut self, mut cb: F) -> Self {
- let cb = move |diag: &dyn Diagnostic| match diag.as_any().downcast_ref::<D>() {
- Some(d) => {
- cb(d);
- Ok(())
- }
- None => Err(()),
- };
- self.callbacks.push(Box::new(cb));
- self
- }
-
- pub fn build<F: FnMut(&dyn Diagnostic) + 'a>(self, default_callback: F) -> DiagnosticSink<'a> {
- DiagnosticSink {
- callbacks: self.callbacks,
- filters: self.filters,
- default_callback: Box::new(default_callback),
- }
- }
-}
+++ /dev/null
-//! Eager expansion related utils
-//!
-//! Here is a dump of a discussion from Vadim Petrochenkov about Eager Expansion and
-//! Its name resolution :
-//!
-//! > Eagerly expanded macros (and also macros eagerly expanded by eagerly expanded macros,
-//! > which actually happens in practice too!) are resolved at the location of the "root" macro
-//! > that performs the eager expansion on its arguments.
-//! > If some name cannot be resolved at the eager expansion time it's considered unresolved,
-//! > even if becomes available later (e.g. from a glob import or other macro).
-//!
-//! > Eagerly expanded macros don't add anything to the module structure of the crate and
-//! > don't build any speculative module structures, i.e. they are expanded in a "flat"
-//! > way even if tokens in them look like modules.
-//!
-//! > In other words, it kinda works for simple cases for which it was originally intended,
-//! > and we need to live with it because it's available on stable and widely relied upon.
-//!
-//!
-//! See the full discussion : https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros
-
-use crate::{
- ast::{self, AstNode},
- db::AstDatabase,
- EagerCallLoc, EagerMacroId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
-};
-
-use base_db::CrateId;
-use parser::FragmentKind;
-use std::sync::Arc;
-use syntax::{algo::SyntaxRewriter, SyntaxNode};
-
-pub fn expand_eager_macro(
- db: &dyn AstDatabase,
- krate: CrateId,
- macro_call: InFile<ast::MacroCall>,
- def: MacroDefId,
- resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
-) -> Option<EagerMacroId> {
- let args = macro_call.value.token_tree()?;
- let parsed_args = mbe::ast_to_token_tree(&args)?.0;
-
- // Note:
- // When `lazy_expand` is called, its *parent* file must be already exists.
- // Here we store an eager macro id for the argument expanded subtree here
- // for that purpose.
- let arg_id = db.intern_eager_expansion({
- EagerCallLoc {
- def,
- fragment: FragmentKind::Expr,
- subtree: Arc::new(parsed_args.clone()),
- krate,
- file_id: macro_call.file_id,
- }
- });
- let arg_file_id: MacroCallId = arg_id.into();
-
- let parsed_args = mbe::token_tree_to_syntax_node(&parsed_args, FragmentKind::Expr).ok()?.0;
- let result = eager_macro_recur(
- db,
- InFile::new(arg_file_id.as_file(), parsed_args.syntax_node()),
- krate,
- resolver,
- )?;
- let subtree = to_subtree(&result)?;
-
- if let MacroDefKind::BuiltInEager(eager) = def.kind {
- let (subtree, fragment) = eager.expand(db, arg_id, &subtree).ok()?;
- let eager = EagerCallLoc {
- def,
- fragment,
- subtree: Arc::new(subtree),
- krate,
- file_id: macro_call.file_id,
- };
-
- Some(db.intern_eager_expansion(eager))
- } else {
- None
- }
-}
-
-fn to_subtree(node: &SyntaxNode) -> Option<tt::Subtree> {
- let mut subtree = mbe::syntax_node_to_token_tree(node)?.0;
- subtree.delimiter = None;
- Some(subtree)
-}
-
-fn lazy_expand(
- db: &dyn AstDatabase,
- def: &MacroDefId,
- macro_call: InFile<ast::MacroCall>,
- krate: CrateId,
-) -> Option<InFile<SyntaxNode>> {
- let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value);
-
- let id: MacroCallId =
- def.as_lazy_macro(db, krate, MacroCallKind::FnLike(macro_call.with_value(ast_id))).into();
-
- db.parse_or_expand(id.as_file()).map(|node| InFile::new(id.as_file(), node))
-}
-
-fn eager_macro_recur(
- db: &dyn AstDatabase,
- curr: InFile<SyntaxNode>,
- krate: CrateId,
- macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
-) -> Option<SyntaxNode> {
- let original = curr.value.clone();
-
- let children = curr.value.descendants().filter_map(ast::MacroCall::cast);
- let mut rewriter = SyntaxRewriter::default();
-
- // Collect replacement
- for child in children {
- let def: MacroDefId = macro_resolver(child.path()?)?;
- let insert = match def.kind {
- MacroDefKind::BuiltInEager(_) => {
- let id: MacroCallId = expand_eager_macro(
- db,
- krate,
- curr.with_value(child.clone()),
- def,
- macro_resolver,
- )?
- .into();
- db.parse_or_expand(id.as_file())?
- }
- MacroDefKind::Declarative
- | MacroDefKind::BuiltIn(_)
- | MacroDefKind::BuiltInDerive(_)
- | MacroDefKind::CustomDerive(_) => {
- let expanded = lazy_expand(db, &def, curr.with_value(child.clone()), krate)?;
- // replace macro inside
- eager_macro_recur(db, expanded, krate, macro_resolver)?
- }
- };
-
- rewriter.replace(child.syntax(), &insert);
- }
-
- let res = rewriter.rewrite(&original);
- Some(res)
-}
+++ /dev/null
-//! This modules handles hygiene information.
-//!
-//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
-//! this moment, this is horribly incomplete and handles only `$crate`.
-use base_db::CrateId;
-use either::Either;
-use syntax::ast;
-
-use crate::{
- db::AstDatabase,
- name::{AsName, Name},
- HirFileId, HirFileIdRepr, MacroCallId, MacroDefKind,
-};
-
-#[derive(Clone, Debug)]
-pub struct Hygiene {
- // This is what `$crate` expands to
- def_crate: Option<CrateId>,
-
- // Indicate this is a local inner macro
- local_inner: bool,
-}
-
-impl Hygiene {
- pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene {
- let (def_crate, local_inner) = match file_id.0 {
- HirFileIdRepr::FileId(_) => (None, false),
- HirFileIdRepr::MacroFile(macro_file) => match macro_file.macro_call_id {
- MacroCallId::LazyMacro(id) => {
- let loc = db.lookup_intern_macro(id);
- match loc.def.kind {
- MacroDefKind::Declarative => (loc.def.krate, loc.def.local_inner),
- MacroDefKind::BuiltIn(_) => (None, false),
- MacroDefKind::BuiltInDerive(_) => (None, false),
- MacroDefKind::BuiltInEager(_) => (None, false),
- MacroDefKind::CustomDerive(_) => (None, false),
- }
- }
- MacroCallId::EagerMacro(_id) => (None, false),
- },
- };
- Hygiene { def_crate, local_inner }
- }
-
- pub fn new_unhygienic() -> Hygiene {
- Hygiene { def_crate: None, local_inner: false }
- }
-
- // FIXME: this should just return name
- pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either<Name, CrateId> {
- if let Some(def_crate) = self.def_crate {
- if name_ref.text() == "$crate" {
- return Either::Right(def_crate);
- }
- }
- Either::Left(name_ref.as_name())
- }
-
- pub fn local_inner_macros(&self) -> Option<CrateId> {
- if self.local_inner {
- self.def_crate
- } else {
- None
- }
- }
-}
+++ /dev/null
-//! `ra_hir_expand` deals with macro expansion.
-//!
-//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax
-//! tree originates not from the text of some `FileId`, but from some macro
-//! expansion.
-
-pub mod db;
-pub mod ast_id_map;
-pub mod name;
-pub mod hygiene;
-pub mod diagnostics;
-pub mod builtin_derive;
-pub mod builtin_macro;
-pub mod proc_macro;
-pub mod quote;
-pub mod eager;
-
-use std::hash::Hash;
-use std::sync::Arc;
-
-use base_db::{impl_intern_key, salsa, CrateId, FileId};
-use syntax::{
- algo,
- ast::{self, AstNode},
- SyntaxNode, SyntaxToken, TextSize,
-};
-
-use crate::ast_id_map::FileAstId;
-use crate::builtin_derive::BuiltinDeriveExpander;
-use crate::builtin_macro::{BuiltinFnLikeExpander, EagerExpander};
-use crate::proc_macro::ProcMacroExpander;
-
-#[cfg(test)]
-mod test_db;
-
-/// Input to the analyzer is a set of files, where each file is identified by
-/// `FileId` and contains source code. However, another source of source code in
-/// Rust are macros: each macro can be thought of as producing a "temporary
-/// file". To assign an id to such a file, we use the id of the macro call that
-/// produced the file. So, a `HirFileId` is either a `FileId` (source code
-/// written by user), or a `MacroCallId` (source code produced by macro).
-///
-/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
-/// containing the call plus the offset of the macro call in the file. Note that
-/// this is a recursive definition! However, the size_of of `HirFileId` is
-/// finite (because everything bottoms out at the real `FileId`) and small
-/// (`MacroCallId` uses the location interning. You can check details here:
-/// https://en.wikipedia.org/wiki/String_interning).
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct HirFileId(HirFileIdRepr);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-enum HirFileIdRepr {
- FileId(FileId),
- MacroFile(MacroFile),
-}
-
-impl From<FileId> for HirFileId {
- fn from(id: FileId) -> Self {
- HirFileId(HirFileIdRepr::FileId(id))
- }
-}
-
-impl From<MacroFile> for HirFileId {
- fn from(id: MacroFile) -> Self {
- HirFileId(HirFileIdRepr::MacroFile(id))
- }
-}
-
-impl HirFileId {
- /// For macro-expansion files, returns the file original source file the
- /// expansion originated from.
- pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId {
- match self.0 {
- HirFileIdRepr::FileId(file_id) => file_id,
- HirFileIdRepr::MacroFile(macro_file) => {
- let file_id = match macro_file.macro_call_id {
- MacroCallId::LazyMacro(id) => {
- let loc = db.lookup_intern_macro(id);
- loc.kind.file_id()
- }
- MacroCallId::EagerMacro(id) => {
- let loc = db.lookup_intern_eager_expansion(id);
- loc.file_id
- }
- };
- file_id.original_file(db)
- }
- }
- }
-
- pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 {
- let mut level = 0;
- let mut curr = self;
- while let HirFileIdRepr::MacroFile(macro_file) = curr.0 {
- level += 1;
- curr = match macro_file.macro_call_id {
- MacroCallId::LazyMacro(id) => {
- let loc = db.lookup_intern_macro(id);
- loc.kind.file_id()
- }
- MacroCallId::EagerMacro(id) => {
- let loc = db.lookup_intern_eager_expansion(id);
- loc.file_id
- }
- };
- }
- level
- }
-
- /// If this is a macro call, returns the syntax node of the call.
- pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
- match self.0 {
- HirFileIdRepr::FileId(_) => None,
- HirFileIdRepr::MacroFile(macro_file) => {
- let lazy_id = match macro_file.macro_call_id {
- MacroCallId::LazyMacro(id) => id,
- MacroCallId::EagerMacro(_id) => {
- // FIXME: handle call node for eager macro
- return None;
- }
- };
- let loc = db.lookup_intern_macro(lazy_id);
- Some(loc.kind.node(db))
- }
- }
- }
-
- /// Return expansion information if it is a macro-expansion file
- pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option<ExpansionInfo> {
- match self.0 {
- HirFileIdRepr::FileId(_) => None,
- HirFileIdRepr::MacroFile(macro_file) => {
- let lazy_id = match macro_file.macro_call_id {
- MacroCallId::LazyMacro(id) => id,
- MacroCallId::EagerMacro(_id) => {
- // FIXME: handle expansion_info for eager macro
- return None;
- }
- };
- let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id);
-
- let arg_tt = loc.kind.arg(db)?;
- let def_tt = loc.def.ast_id?.to_node(db).token_tree()?;
-
- let macro_def = db.macro_def(loc.def)?;
- let (parse, exp_map) = db.parse_macro(macro_file)?;
- let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
-
- Some(ExpansionInfo {
- expanded: InFile::new(self, parse.syntax_node()),
- arg: InFile::new(loc.kind.file_id(), arg_tt),
- def: InFile::new(loc.def.ast_id?.file_id, def_tt),
- macro_arg,
- macro_def,
- exp_map,
- })
- }
- }
- }
-
- /// Indicate it is macro file generated for builtin derive
- pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option<InFile<ast::Item>> {
- match self.0 {
- HirFileIdRepr::FileId(_) => None,
- HirFileIdRepr::MacroFile(macro_file) => {
- let lazy_id = match macro_file.macro_call_id {
- MacroCallId::LazyMacro(id) => id,
- MacroCallId::EagerMacro(_id) => {
- return None;
- }
- };
- let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id);
- let item = match loc.def.kind {
- MacroDefKind::BuiltInDerive(_) => loc.kind.node(db),
- _ => return None,
- };
- Some(item.with_value(ast::Item::cast(item.value.clone())?))
- }
- }
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroFile {
- macro_call_id: MacroCallId,
-}
-
-/// `MacroCallId` identifies a particular macro invocation, like
-/// `println!("Hello, {}", world)`.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum MacroCallId {
- LazyMacro(LazyMacroId),
- EagerMacro(EagerMacroId),
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct LazyMacroId(salsa::InternId);
-impl_intern_key!(LazyMacroId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct EagerMacroId(salsa::InternId);
-impl_intern_key!(EagerMacroId);
-
-impl From<LazyMacroId> for MacroCallId {
- fn from(it: LazyMacroId) -> Self {
- MacroCallId::LazyMacro(it)
- }
-}
-impl From<EagerMacroId> for MacroCallId {
- fn from(it: EagerMacroId) -> Self {
- MacroCallId::EagerMacro(it)
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroDefId {
- // FIXME: krate and ast_id are currently optional because we don't have a
- // definition location for built-in derives. There is one, though: the
- // standard library defines them. The problem is that it uses the new
- // `macro` syntax for this, which we don't support yet. As soon as we do
- // (which will probably require touching this code), we can instead use
- // that (and also remove the hacks for resolving built-in derives).
- pub krate: Option<CrateId>,
- pub ast_id: Option<AstId<ast::MacroCall>>,
- pub kind: MacroDefKind,
-
- pub local_inner: bool,
-}
-
-impl MacroDefId {
- pub fn as_lazy_macro(
- self,
- db: &dyn db::AstDatabase,
- krate: CrateId,
- kind: MacroCallKind,
- ) -> LazyMacroId {
- db.intern_macro(MacroCallLoc { def: self, krate, kind })
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum MacroDefKind {
- Declarative,
- BuiltIn(BuiltinFnLikeExpander),
- // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander
- BuiltInDerive(BuiltinDeriveExpander),
- BuiltInEager(EagerExpander),
- CustomDerive(ProcMacroExpander),
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct MacroCallLoc {
- pub(crate) def: MacroDefId,
- pub(crate) krate: CrateId,
- pub(crate) kind: MacroCallKind,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum MacroCallKind {
- FnLike(AstId<ast::MacroCall>),
- Attr(AstId<ast::Item>, String),
-}
-
-impl MacroCallKind {
- fn file_id(&self) -> HirFileId {
- match self {
- MacroCallKind::FnLike(ast_id) => ast_id.file_id,
- MacroCallKind::Attr(ast_id, _) => ast_id.file_id,
- }
- }
-
- fn node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> {
- match self {
- MacroCallKind::FnLike(ast_id) => ast_id.with_value(ast_id.to_node(db).syntax().clone()),
- MacroCallKind::Attr(ast_id, _) => {
- ast_id.with_value(ast_id.to_node(db).syntax().clone())
- }
- }
- }
-
- fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> {
- match self {
- MacroCallKind::FnLike(ast_id) => {
- Some(ast_id.to_node(db).token_tree()?.syntax().clone())
- }
- MacroCallKind::Attr(ast_id, _) => Some(ast_id.to_node(db).syntax().clone()),
- }
- }
-}
-
-impl MacroCallId {
- pub fn as_file(self) -> HirFileId {
- MacroFile { macro_call_id: self }.into()
- }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct EagerCallLoc {
- pub(crate) def: MacroDefId,
- pub(crate) fragment: FragmentKind,
- pub(crate) subtree: Arc<tt::Subtree>,
- pub(crate) krate: CrateId,
- pub(crate) file_id: HirFileId,
-}
-
-/// ExpansionInfo mainly describes how to map text range between src and expanded macro
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ExpansionInfo {
- expanded: InFile<SyntaxNode>,
- arg: InFile<SyntaxNode>,
- def: InFile<ast::TokenTree>,
-
- macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
- macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
- exp_map: Arc<mbe::TokenMap>,
-}
-
-pub use mbe::Origin;
-use parser::FragmentKind;
-
-impl ExpansionInfo {
- pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
- Some(self.arg.with_value(self.arg.value.parent()?))
- }
-
- pub fn map_token_down(&self, token: InFile<&SyntaxToken>) -> Option<InFile<SyntaxToken>> {
- assert_eq!(token.file_id, self.arg.file_id);
- let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
- let token_id = self.macro_arg.1.token_by_range(range)?;
- let token_id = self.macro_def.0.map_id_down(token_id);
-
- let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
-
- let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;
-
- Some(self.expanded.with_value(token))
- }
-
- pub fn map_token_up(
- &self,
- token: InFile<&SyntaxToken>,
- ) -> Option<(InFile<SyntaxToken>, Origin)> {
- let token_id = self.exp_map.token_by_range(token.value.text_range())?;
-
- let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
- let (token_map, tt) = match origin {
- mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
- mbe::Origin::Def => {
- (&self.macro_def.1, self.def.as_ref().map(|tt| tt.syntax().clone()))
- }
- };
-
- let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
- let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start())
- .into_token()?;
- Some((tt.with_value(token), origin))
- }
-}
-
-/// `AstId` points to an AST node in any file.
-///
-/// It is stable across reparses, and can be used as salsa key/value.
-// FIXME: isn't this just a `Source<FileAstId<N>>` ?
-pub type AstId<N> = InFile<FileAstId<N>>;
-
-impl<N: AstNode> AstId<N> {
- pub fn to_node(&self, db: &dyn db::AstDatabase) -> N {
- let root = db.parse_or_expand(self.file_id).unwrap();
- db.ast_id_map(self.file_id).get(self.value).to_node(&root)
- }
-}
-
-/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
-///
-/// Typical usages are:
-///
-/// * `InFile<SyntaxNode>` -- syntax node in a file
-/// * `InFile<ast::FnDef>` -- ast node in a file
-/// * `InFile<TextSize>` -- offset in a file
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-pub struct InFile<T> {
- pub file_id: HirFileId,
- pub value: T,
-}
-
-impl<T> InFile<T> {
- pub fn new(file_id: HirFileId, value: T) -> InFile<T> {
- InFile { file_id, value }
- }
-
- // Similarly, naming here is stupid...
- pub fn with_value<U>(&self, value: U) -> InFile<U> {
- InFile::new(self.file_id, value)
- }
-
- pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFile<U> {
- InFile::new(self.file_id, f(self.value))
- }
- pub fn as_ref(&self) -> InFile<&T> {
- self.with_value(&self.value)
- }
- pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode {
- db.parse_or_expand(self.file_id).expect("source created from invalid file")
- }
-}
-
-impl<T: Clone> InFile<&T> {
- pub fn cloned(&self) -> InFile<T> {
- self.with_value(self.value.clone())
- }
-}
-
-impl<T> InFile<Option<T>> {
- pub fn transpose(self) -> Option<InFile<T>> {
- let value = self.value?;
- Some(InFile::new(self.file_id, value))
- }
-}
-
-impl InFile<SyntaxNode> {
- pub fn ancestors_with_macros(
- self,
- db: &dyn db::AstDatabase,
- ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
- std::iter::successors(Some(self), move |node| match node.value.parent() {
- Some(parent) => Some(node.with_value(parent)),
- None => {
- let parent_node = node.file_id.call_node(db)?;
- Some(parent_node)
- }
- })
- }
-}
-
-impl InFile<SyntaxToken> {
- pub fn ancestors_with_macros(
- self,
- db: &dyn db::AstDatabase,
- ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
- self.map(|it| it.parent()).ancestors_with_macros(db)
- }
-}
-
-impl<N: AstNode> InFile<N> {
- pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
- self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
- }
-
- pub fn syntax(&self) -> InFile<&SyntaxNode> {
- self.with_value(self.value.syntax())
- }
-}
+++ /dev/null
-//! FIXME: write short doc here
-
-use std::fmt;
-
-use syntax::{ast, SmolStr};
-
-/// `Name` is a wrapper around string, which is used in hir for both references
-/// and declarations. In theory, names should also carry hygiene info, but we are
-/// not there yet!
-#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct Name(Repr);
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
-enum Repr {
- Text(SmolStr),
- TupleField(usize),
-}
-
-impl fmt::Display for Name {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match &self.0 {
- Repr::Text(text) => fmt::Display::fmt(&text, f),
- Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
- }
- }
-}
-
-impl Name {
- /// Note: this is private to make creating name from random string hard.
- /// Hopefully, this should allow us to integrate hygiene cleaner in the
- /// future, and to switch to interned representation of names.
- const fn new_text(text: SmolStr) -> Name {
- Name(Repr::Text(text))
- }
-
- pub fn new_tuple_field(idx: usize) -> Name {
- Name(Repr::TupleField(idx))
- }
-
- pub fn new_lifetime(lt: &syntax::SyntaxToken) -> Name {
- assert!(lt.kind() == syntax::SyntaxKind::LIFETIME);
- Name(Repr::Text(lt.text().clone()))
- }
-
- /// Shortcut to create inline plain text name
- const fn new_inline_ascii(text: &[u8]) -> Name {
- Name::new_text(SmolStr::new_inline_from_ascii(text.len(), text))
- }
-
- /// Resolve a name from the text of token.
- fn resolve(raw_text: &SmolStr) -> Name {
- let raw_start = "r#";
- if raw_text.as_str().starts_with(raw_start) {
- Name::new_text(SmolStr::new(&raw_text[raw_start.len()..]))
- } else {
- Name::new_text(raw_text.clone())
- }
- }
-
- pub fn missing() -> Name {
- Name::new_text("[missing name]".into())
- }
-
- pub fn as_tuple_index(&self) -> Option<usize> {
- match self.0 {
- Repr::TupleField(idx) => Some(idx),
- _ => None,
- }
- }
-}
-
-pub trait AsName {
- fn as_name(&self) -> Name;
-}
-
-impl AsName for ast::NameRef {
- fn as_name(&self) -> Name {
- match self.as_tuple_field() {
- Some(idx) => Name::new_tuple_field(idx),
- None => Name::resolve(self.text()),
- }
- }
-}
-
-impl AsName for ast::Name {
- fn as_name(&self) -> Name {
- Name::resolve(self.text())
- }
-}
-
-impl AsName for ast::NameOrNameRef {
- fn as_name(&self) -> Name {
- match self {
- ast::NameOrNameRef::Name(it) => it.as_name(),
- ast::NameOrNameRef::NameRef(it) => it.as_name(),
- }
- }
-}
-
-impl AsName for tt::Ident {
- fn as_name(&self) -> Name {
- Name::resolve(&self.text)
- }
-}
-
-impl AsName for ast::FieldKind {
- fn as_name(&self) -> Name {
- match self {
- ast::FieldKind::Name(nr) => nr.as_name(),
- ast::FieldKind::Index(idx) => {
- let idx = idx.text().parse::<usize>().unwrap_or(0);
- Name::new_tuple_field(idx)
- }
- }
- }
-}
-
-impl AsName for base_db::Dependency {
- fn as_name(&self) -> Name {
- Name::new_text(SmolStr::new(&*self.name))
- }
-}
-
-pub mod known {
- macro_rules! known_names {
- ($($ident:ident),* $(,)?) => {
- $(
- #[allow(bad_style)]
- pub const $ident: super::Name =
- super::Name::new_inline_ascii(stringify!($ident).as_bytes());
- )*
- };
- }
-
- known_names!(
- // Primitives
- isize,
- i8,
- i16,
- i32,
- i64,
- i128,
- usize,
- u8,
- u16,
- u32,
- u64,
- u128,
- f32,
- f64,
- bool,
- char,
- str,
- // Special names
- macro_rules,
- doc,
- // Components of known path (value or mod name)
- std,
- core,
- alloc,
- iter,
- ops,
- future,
- result,
- boxed,
- // Components of known path (type name)
- IntoIterator,
- Item,
- Try,
- Ok,
- Future,
- Result,
- Output,
- Target,
- Box,
- RangeFrom,
- RangeFull,
- RangeInclusive,
- RangeToInclusive,
- RangeTo,
- Range,
- Neg,
- Not,
- Index,
- // Builtin macros
- file,
- column,
- compile_error,
- line,
- assert,
- stringify,
- concat,
- include,
- include_bytes,
- include_str,
- format_args,
- format_args_nl,
- env,
- option_env,
- // Builtin derives
- Copy,
- Clone,
- Default,
- Debug,
- Hash,
- Ord,
- PartialOrd,
- Eq,
- PartialEq,
- );
-
- // self/Self cannot be used as an identifier
- pub const SELF_PARAM: super::Name = super::Name::new_inline_ascii(b"self");
- pub const SELF_TYPE: super::Name = super::Name::new_inline_ascii(b"Self");
-
- #[macro_export]
- macro_rules! name {
- (self) => {
- $crate::name::known::SELF_PARAM
- };
- (Self) => {
- $crate::name::known::SELF_TYPE
- };
- ($ident:ident) => {
- $crate::name::known::$ident
- };
- }
-}
-
-pub use crate::name;
+++ /dev/null
-//! Proc Macro Expander stub
-
-use crate::{db::AstDatabase, LazyMacroId};
-use base_db::{CrateId, ProcMacroId};
-use tt::buffer::{Cursor, TokenBuffer};
-
-#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
-pub struct ProcMacroExpander {
- krate: CrateId,
- proc_macro_id: ProcMacroId,
-}
-
-macro_rules! err {
- ($fmt:literal, $($tt:tt),*) => {
- mbe::ExpandError::ProcMacroError(tt::ExpansionError::Unknown(format!($fmt, $($tt),*)))
- };
- ($fmt:literal) => {
- mbe::ExpandError::ProcMacroError(tt::ExpansionError::Unknown($fmt.to_string()))
- }
-}
-
-impl ProcMacroExpander {
- pub fn new(krate: CrateId, proc_macro_id: ProcMacroId) -> ProcMacroExpander {
- ProcMacroExpander { krate, proc_macro_id }
- }
-
- pub fn expand(
- self,
- db: &dyn AstDatabase,
- _id: LazyMacroId,
- tt: &tt::Subtree,
- ) -> Result<tt::Subtree, mbe::ExpandError> {
- let krate_graph = db.crate_graph();
- let proc_macro = krate_graph[self.krate]
- .proc_macro
- .get(self.proc_macro_id.0 as usize)
- .clone()
- .ok_or_else(|| err!("No derive macro found."))?;
-
- let tt = remove_derive_attrs(tt)
- .ok_or_else(|| err!("Fail to remove derive for custom derive"))?;
-
- proc_macro.expander.expand(&tt, None).map_err(mbe::ExpandError::from)
- }
-}
-
-fn eat_punct(cursor: &mut Cursor, c: char) -> bool {
- if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = cursor.token_tree() {
- if punct.char == c {
- *cursor = cursor.bump();
- return true;
- }
- }
- false
-}
-
-fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool {
- if let Some(tt::TokenTree::Subtree(subtree)) = cursor.token_tree() {
- if Some(kind) == subtree.delimiter_kind() {
- *cursor = cursor.bump_subtree();
- return true;
- }
- }
- false
-}
-
-fn eat_ident(cursor: &mut Cursor, t: &str) -> bool {
- if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = cursor.token_tree() {
- if t == ident.text.as_str() {
- *cursor = cursor.bump();
- return true;
- }
- }
- false
-}
-
-fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
- let buffer = TokenBuffer::new(&tt.token_trees);
- let mut p = buffer.begin();
- let mut result = tt::Subtree::default();
-
- while !p.eof() {
- let curr = p;
-
- if eat_punct(&mut p, '#') {
- eat_punct(&mut p, '!');
- let parent = p;
- if eat_subtree(&mut p, tt::DelimiterKind::Bracket) {
- if eat_ident(&mut p, "derive") {
- p = parent.bump();
- continue;
- }
- }
- }
-
- result.token_trees.push(curr.token_tree()?.clone());
- p = curr.bump();
- }
-
- Some(result)
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use test_utils::assert_eq_text;
-
- #[test]
- fn test_remove_derive_attrs() {
- let tt = mbe::parse_to_token_tree(
- r#"
- #[allow(unused)]
- #[derive(Copy)]
- #[derive(Hello)]
- struct A {
- bar: u32
- }
-"#,
- )
- .unwrap()
- .0;
- let result = format!("{:#?}", remove_derive_attrs(&tt).unwrap());
-
- assert_eq_text!(
- &result,
- r#"
-SUBTREE $
- PUNCH # [alone] 0
- SUBTREE [] 1
- IDENT allow 2
- SUBTREE () 3
- IDENT unused 4
- IDENT struct 15
- IDENT A 16
- SUBTREE {} 17
- IDENT bar 18
- PUNCH : [alone] 19
- IDENT u32 20
-"#
- .trim()
- );
- }
-}
+++ /dev/null
-//! A simplified version of quote-crate like quasi quote macro
-
-// A helper macro quote macro
-// FIXME:
-// 1. Not all puncts are handled
-// 2. #()* pattern repetition not supported now
-// * But we can do it manually, see `test_quote_derive_copy_hack`
-#[doc(hidden)]
-#[macro_export]
-macro_rules! __quote {
- () => {
- Vec::<tt::TokenTree>::new()
- };
-
- ( @SUBTREE $delim:ident $($tt:tt)* ) => {
- {
- let children = $crate::__quote!($($tt)*);
- tt::Subtree {
- delimiter: Some(tt::Delimiter {
- kind: tt::DelimiterKind::$delim,
- id: tt::TokenId::unspecified(),
- }),
- token_trees: $crate::quote::IntoTt::to_tokens(children),
- }
- }
- };
-
- ( @PUNCT $first:literal ) => {
- {
- vec![
- tt::Leaf::Punct(tt::Punct {
- char: $first,
- spacing: tt::Spacing::Alone,
- id: tt::TokenId::unspecified(),
- }).into()
- ]
- }
- };
-
- ( @PUNCT $first:literal, $sec:literal ) => {
- {
- vec![
- tt::Leaf::Punct(tt::Punct {
- char: $first,
- spacing: tt::Spacing::Joint,
- id: tt::TokenId::unspecified(),
- }).into(),
- tt::Leaf::Punct(tt::Punct {
- char: $sec,
- spacing: tt::Spacing::Alone,
- id: tt::TokenId::unspecified(),
- }).into()
- ]
- }
- };
-
- // hash variable
- ( # $first:ident $($tail:tt)* ) => {
- {
- let token = $crate::quote::ToTokenTree::to_token($first);
- let mut tokens = vec![token.into()];
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
- tokens.append(&mut tail_tokens);
- tokens
- }
- };
-
- ( ## $first:ident $($tail:tt)* ) => {
- {
- let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<tt::TokenTree>>();
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
- tokens.append(&mut tail_tokens);
- tokens
- }
- };
-
- // Brace
- ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
- // Bracket
- ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
- // Parenthesis
- ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
-
- // Literal
- ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
- // Ident
- ( $tt:ident ) => {
- vec![ {
- tt::Leaf::Ident(tt::Ident {
- text: stringify!($tt).into(),
- id: tt::TokenId::unspecified(),
- }).into()
- }]
- };
-
- // Puncts
- // FIXME: Not all puncts are handled
- ( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
- ( & ) => {$crate::__quote!(@PUNCT '&')};
- ( , ) => {$crate::__quote!(@PUNCT ',')};
- ( : ) => {$crate::__quote!(@PUNCT ':')};
- ( ; ) => {$crate::__quote!(@PUNCT ';')};
- ( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
- ( . ) => {$crate::__quote!(@PUNCT '.')};
- ( < ) => {$crate::__quote!(@PUNCT '<')};
- ( > ) => {$crate::__quote!(@PUNCT '>')};
-
- ( $first:tt $($tail:tt)+ ) => {
- {
- let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
-
- tokens.append(&mut tail_tokens);
- tokens
- }
- };
-}
-
-/// FIXME:
-/// It probably should implement in proc-macro
-#[macro_export]
-macro_rules! quote {
- ( $($tt:tt)* ) => {
- $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
- }
-}
-
-pub(crate) trait IntoTt {
- fn to_subtree(self) -> tt::Subtree;
- fn to_tokens(self) -> Vec<tt::TokenTree>;
-}
-
-impl IntoTt for Vec<tt::TokenTree> {
- fn to_subtree(self) -> tt::Subtree {
- tt::Subtree { delimiter: None, token_trees: self }
- }
-
- fn to_tokens(self) -> Vec<tt::TokenTree> {
- self
- }
-}
-
-impl IntoTt for tt::Subtree {
- fn to_subtree(self) -> tt::Subtree {
- self
- }
-
- fn to_tokens(self) -> Vec<tt::TokenTree> {
- vec![tt::TokenTree::Subtree(self)]
- }
-}
-
-pub(crate) trait ToTokenTree {
- fn to_token(self) -> tt::TokenTree;
-}
-
-impl ToTokenTree for tt::TokenTree {
- fn to_token(self) -> tt::TokenTree {
- self
- }
-}
-
-impl ToTokenTree for tt::Subtree {
- fn to_token(self) -> tt::TokenTree {
- self.into()
- }
-}
-
-macro_rules! impl_to_to_tokentrees {
- ($($ty:ty => $this:ident $im:block);*) => {
- $(
- impl ToTokenTree for $ty {
- fn to_token($this) -> tt::TokenTree {
- let leaf: tt::Leaf = $im.into();
- leaf.into()
- }
- }
-
- impl ToTokenTree for &$ty {
- fn to_token($this) -> tt::TokenTree {
- let leaf: tt::Leaf = $im.clone().into();
- leaf.into()
- }
- }
- )*
- }
-}
-
-impl_to_to_tokentrees! {
- u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
- usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
- i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
- tt::Leaf => self { self };
- tt::Literal => self { self };
- tt::Ident => self { self };
- tt::Punct => self { self };
- &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}};
- String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}
-}
-
-#[cfg(test)]
-mod tests {
- #[test]
- fn test_quote_delimiters() {
- assert_eq!(quote!({}).to_string(), "{}");
- assert_eq!(quote!(()).to_string(), "()");
- assert_eq!(quote!([]).to_string(), "[]");
- }
-
- #[test]
- fn test_quote_idents() {
- assert_eq!(quote!(32).to_string(), "32");
- assert_eq!(quote!(struct).to_string(), "struct");
- }
-
- #[test]
- fn test_quote_hash_simple_literal() {
- let a = 20;
- assert_eq!(quote!(#a).to_string(), "20");
- let s: String = "hello".into();
- assert_eq!(quote!(#s).to_string(), "\"hello\"");
- }
-
- fn mk_ident(name: &str) -> tt::Ident {
- tt::Ident { text: name.into(), id: tt::TokenId::unspecified() }
- }
-
- #[test]
- fn test_quote_hash_token_tree() {
- let a = mk_ident("hello");
-
- let quoted = quote!(#a);
- assert_eq!(quoted.to_string(), "hello");
- let t = format!("{:?}", quoted);
- assert_eq!(t, "SUBTREE $\n IDENT hello 4294967295");
- }
-
- #[test]
- fn test_quote_simple_derive_copy() {
- let name = mk_ident("Foo");
-
- let quoted = quote! {
- impl Clone for #name {
- fn clone(&self) -> Self {
- Self {}
- }
- }
- };
-
- assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {}}}");
- }
-
- #[test]
- fn test_quote_derive_copy_hack() {
- // Assume the given struct is:
- // struct Foo {
- // name: String,
- // id: u32,
- // }
- let struct_name = mk_ident("Foo");
- let fields = [mk_ident("name"), mk_ident("id")];
- let fields = fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees).flatten();
-
- let list = tt::Subtree {
- delimiter: Some(tt::Delimiter {
- kind: tt::DelimiterKind::Brace,
- id: tt::TokenId::unspecified(),
- }),
- token_trees: fields.collect(),
- };
-
- let quoted = quote! {
- impl Clone for #struct_name {
- fn clone(&self) -> Self {
- Self #list
- }
- }
- };
-
- assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {name : self . name . clone () , id : self . id . clone () ,}}}");
- }
-}
+++ /dev/null
-//! Database used for testing `hir_expand`.
-
-use std::{
- fmt, panic,
- sync::{Arc, Mutex},
-};
-
-use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate};
-use rustc_hash::FxHashSet;
-
-#[salsa::database(
- base_db::SourceDatabaseExtStorage,
- base_db::SourceDatabaseStorage,
- crate::db::AstDatabaseStorage
-)]
-#[derive(Default)]
-pub struct TestDB {
- storage: salsa::Storage<TestDB>,
- events: Mutex<Option<Vec<salsa::Event>>>,
-}
-
-impl fmt::Debug for TestDB {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("TestDB").finish()
- }
-}
-
-impl salsa::Database for TestDB {
- fn salsa_event(&self, event: salsa::Event) {
- let mut events = self.events.lock().unwrap();
- if let Some(events) = &mut *events {
- events.push(event);
- }
- }
-}
-
-impl panic::RefUnwindSafe for TestDB {}
-
-impl FileLoader for TestDB {
- fn file_text(&self, file_id: FileId) -> Arc<String> {
- FileLoaderDelegate(self).file_text(file_id)
- }
- fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
- FileLoaderDelegate(self).resolve_path(anchor, path)
- }
- fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
- FileLoaderDelegate(self).relevant_crates(file_id)
- }
-}
stdx = { path = "../stdx" }
hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
-hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
+hir_expand = { path = "../hir_expand" }
arena = { path = "../arena" }
base_db = { path = "../base_db" }
profile = { path = "../profile" }
let poorly_documented = [
"ra_hir",
- "ra_hir_expand",
+ "hir_expand",
"ra_ide",
"mbe",
"parser",