]> git.lizzy.rs Git - rust.git/commitdiff
Rename proc macro server from 'Rustc' to 'RustAnalyzer'
authorAmos Wenger <amoswenger@gmail.com>
Wed, 20 Jul 2022 13:40:23 +0000 (15:40 +0200)
committerAmos Wenger <amoswenger@gmail.com>
Wed, 20 Jul 2022 13:40:23 +0000 (15:40 +0200)
crates/hir-ty/src/tests/traits.rs
crates/proc-macro-srv/src/abis/abi_1_58/mod.rs
crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs [new file with mode: 0644]
crates/proc-macro-srv/src/abis/abi_1_58/rustc_server.rs [deleted file]
crates/proc-macro-srv/src/abis/abi_1_63/mod.rs
crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs [new file with mode: 0644]
crates/proc-macro-srv/src/abis/abi_1_63/rustc_server.rs [deleted file]
crates/proc-macro-srv/src/abis/abi_1_64/mod.rs
crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs [new file with mode: 0644]
crates/proc-macro-srv/src/abis/abi_1_64/rustc_server.rs [deleted file]

index aa8b420e98099ee12c61a84d0e26b50c0189f953..75802a5eb4db83810cf88cc18cf6f5cb3f1e5d6f 100644 (file)
@@ -2430,29 +2430,29 @@ impl<S: Types $(+ $name)*> Server for S {}
 with_api!(Self, self_, declare_server_traits);
 struct G {}
 struct T {}
-struct Rustc;
-impl Types for Rustc {
+struct RustAnalyzer;
+impl Types for RustAnalyzer {
     type TokenStream = T;
     type Group = G;
 }
 
 fn make<T>() -> T { loop {} }
-impl TokenStream for Rustc {
+impl TokenStream for RustAnalyzer {
     fn new() -> Self::TokenStream {
         let group: Self::Group = make();
         make()
     }
 }"#,
         expect![[r#"
-            1061..1072 '{ loop {} }': T
-            1063..1070 'loop {}': !
-            1068..1070 '{}': ()
-            1136..1199 '{     ...     }': T
-            1150..1155 'group': G
-            1171..1175 'make': fn make<G>() -> G
-            1171..1177 'make()': G
-            1187..1191 'make': fn make<T>() -> T
-            1187..1193 'make()': T
+            1075..1086 '{ loop {} }': T
+            1077..1084 'loop {}': !
+            1082..1084 '{}': ()
+            1157..1220 '{     ...     }': T
+            1171..1176 'group': G
+            1192..1196 'make': fn make<G>() -> G
+            1192..1198 'make()': G
+            1208..1212 'make': fn make<T>() -> T
+            1208..1214 'make()': T
         "#]],
     );
 }
index 41e2e45703d9ae7546e7cb8c960731ece1e3659b..1c91ac0fa1b8fb66d821b166bc52de075a2ed4fb 100644 (file)
@@ -6,7 +6,7 @@
 
 #[allow(dead_code)]
 #[doc(hidden)]
-mod rustc_server;
+mod ra_server;
 
 use libloading::Library;
 use proc_macro_api::ProcMacroKind;
@@ -36,10 +36,10 @@ pub fn expand(
         macro_body: &tt::Subtree,
         attributes: Option<&tt::Subtree>,
     ) -> Result<tt::Subtree, PanicMessage> {
-        let parsed_body = rustc_server::TokenStream::with_subtree(macro_body.clone());
+        let parsed_body = ra_server::TokenStream::with_subtree(macro_body.clone());
 
-        let parsed_attributes = attributes.map_or(rustc_server::TokenStream::new(), |attr| {
-            rustc_server::TokenStream::with_subtree(attr.clone())
+        let parsed_attributes = attributes.map_or(ra_server::TokenStream::new(), |attr| {
+            ra_server::TokenStream::with_subtree(attr.clone())
         });
 
         for proc_macro in &self.exported_macros {
@@ -49,7 +49,7 @@ pub fn expand(
                 } if *trait_name == macro_name => {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        rustc_server::Rustc::default(),
+                        ra_server::RustAnalyzer::default(),
                         parsed_body,
                         true,
                     );
@@ -60,7 +60,7 @@ pub fn expand(
                 {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        rustc_server::Rustc::default(),
+                        ra_server::RustAnalyzer::default(),
                         parsed_body,
                         true,
                     );
@@ -71,7 +71,7 @@ pub fn expand(
                 {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        rustc_server::Rustc::default(),
+                        ra_server::RustAnalyzer::default(),
                         parsed_attributes,
                         parsed_body,
                         true,
diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs
new file mode 100644 (file)
index 0000000..ebdfca0
--- /dev/null
@@ -0,0 +1,819 @@
+//! Rustc proc-macro server implementation with tt
+//!
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
+//! we could provide any TokenStream implementation.
+//! The original idea from fedochet is using proc-macro2 as backend,
+//! we use tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Clone)]
+pub struct TokenStream {
+    pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+    pub fn new() -> Self {
+        TokenStream { token_trees: Default::default() }
+    }
+
+    pub fn with_subtree(subtree: tt::Subtree) -> Self {
+        if subtree.delimiter.is_some() {
+            TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+        } else {
+            TokenStream { token_trees: subtree.token_trees }
+        }
+    }
+
+    pub fn into_subtree(self) -> tt::Subtree {
+        tt::Subtree { delimiter: None, token_trees: self.token_trees }
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.token_trees.is_empty()
+    }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+    fn from(tree: TokenTree) -> TokenStream {
+        TokenStream { token_trees: vec![tree] }
+    }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+        trees.into_iter().map(TokenStream::from).collect()
+    }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+        let mut builder = TokenStreamBuilder::new();
+        streams.into_iter().for_each(|stream| builder.push(stream));
+        builder.build()
+    }
+}
+
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+        self.extend(trees.into_iter().map(TokenStream::from));
+    }
+}
+
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+        for item in streams {
+            for tkn in item {
+                match tkn {
+                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+                        self.token_trees.extend(subtree.token_trees);
+                    }
+                    _ => {
+                        self.token_trees.push(tkn);
+                    }
+                }
+            }
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct SourceFile {
+    // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+    level: Level,
+    message: String,
+    spans: Vec<Span>,
+    children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+    /// Creates a new diagnostic with the given `level` and `message`.
+    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+    }
+}
+
+// Rustc server `Ident` has to implement `Copy`
+// We use a stub here for bypassing
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+    idents: HashMap<IdentData, u32>,
+    ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+    fn intern(&mut self, data: &IdentData) -> u32 {
+        if let Some(index) = self.idents.get(data) {
+            return *index;
+        }
+
+        let index = self.idents.len() as u32;
+        self.ident_data.push(data.clone());
+        self.idents.insert(data.clone(), index);
+        index
+    }
+
+    fn get(&self, index: u32) -> &IdentData {
+        &self.ident_data[index as usize]
+    }
+
+    #[allow(unused)]
+    fn get_mut(&mut self, index: u32) -> &mut IdentData {
+        self.ident_data.get_mut(index as usize).expect("Should be consistent")
+    }
+}
+
+pub struct TokenStreamBuilder {
+    acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+    use std::str::FromStr;
+
+    use super::{TokenStream, TokenTree};
+
+    /// An iterator over `TokenStream`'s `TokenTree`s.
+    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+    /// and returns whole groups as token trees.
+    impl IntoIterator for TokenStream {
+        type Item = TokenTree;
+        type IntoIter = super::IntoIter<TokenTree>;
+
+        fn into_iter(self) -> Self::IntoIter {
+            self.token_trees.into_iter()
+        }
+    }
+
+    type LexError = String;
+
+    /// Attempts to break the string into tokens and parse those tokens into a token stream.
+    /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+    /// or characters not existing in the language.
+    /// All tokens in the parsed stream get `Span::call_site()` spans.
+    ///
+    /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+    /// change these errors into `LexError`s later.
+    impl FromStr for TokenStream {
+        type Err = LexError;
+
+        fn from_str(src: &str) -> Result<TokenStream, LexError> {
+            let (subtree, _token_map) =
+                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+            Ok(TokenStream::with_subtree(subtree))
+        }
+    }
+
+    impl ToString for TokenStream {
+        fn to_string(&self) -> String {
+            tt::pretty(&self.token_trees)
+        }
+    }
+
+    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+        tt::Subtree {
+            delimiter: subtree
+                .delimiter
+                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+            token_trees: subtree
+                .token_trees
+                .into_iter()
+                .map(token_tree_replace_token_ids_with_unspecified)
+                .collect(),
+        }
+    }
+
+    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+        match tt {
+            tt::TokenTree::Leaf(leaf) => {
+                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+            }
+            tt::TokenTree::Subtree(subtree) => {
+                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+            }
+        }
+    }
+
+    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+        match leaf {
+            tt::Leaf::Literal(lit) => {
+                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+            }
+            tt::Leaf::Punct(punct) => {
+                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+            }
+            tt::Leaf::Ident(ident) => {
+                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+            }
+        }
+    }
+}
+
+impl TokenStreamBuilder {
+    fn new() -> TokenStreamBuilder {
+        TokenStreamBuilder { acc: TokenStream::new() }
+    }
+
+    fn push(&mut self, stream: TokenStream) {
+        self.acc.extend(stream.into_iter())
+    }
+
+    fn build(self) -> TokenStream {
+        self.acc
+    }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+    trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+    ident_interner: IdentInterner,
+    // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+    type FreeFunctions = FreeFunctions;
+    type TokenStream = TokenStream;
+    type TokenStreamBuilder = TokenStreamBuilder;
+    type TokenStreamIter = TokenStreamIter;
+    type Group = Group;
+    type Punct = Punct;
+    type Ident = IdentId;
+    type Literal = Literal;
+    type SourceFile = SourceFile;
+    type Diagnostic = Diagnostic;
+    type Span = Span;
+    type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+        // FIXME: track env var accesses
+        // https://github.com/rust-lang/rust/pull/71858
+    }
+    fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+    fn new(&mut self) -> Self::TokenStream {
+        Self::TokenStream::new()
+    }
+
+    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+        stream.is_empty()
+    }
+    fn from_str(&mut self, src: &str) -> Self::TokenStream {
+        use std::str::FromStr;
+
+        Self::TokenStream::from_str(src).expect("cannot parse string")
+    }
+    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+        stream.to_string()
+    }
+    fn from_token_tree(
+        &mut self,
+        tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+    ) -> Self::TokenStream {
+        match tree {
+            bridge::TokenTree::Group(group) => {
+                let tree = TokenTree::from(group);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Ident(IdentId(index)) => {
+                let IdentData(ident) = self.ident_interner.get(index).clone();
+                let ident: tt::Ident = ident;
+                let leaf = tt::Leaf::from(ident);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Literal(literal) => {
+                let leaf = tt::Leaf::from(literal);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Punct(p) => {
+                let leaf = tt::Leaf::from(p);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+        }
+    }
+
+    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
+        let trees: Vec<TokenTree> = stream.into_iter().collect();
+        TokenStreamIter { trees: trees.into_iter() }
+    }
+
+    fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+        Ok(self_.clone())
+    }
+}
+
+impl server::TokenStreamBuilder for RustAnalyzer {
+    fn new(&mut self) -> Self::TokenStreamBuilder {
+        Self::TokenStreamBuilder::new()
+    }
+    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
+        builder.push(stream)
+    }
+    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
+        builder.build()
+    }
+}
+
+impl server::TokenStreamIter for RustAnalyzer {
+    fn next(
+        &mut self,
+        iter: &mut Self::TokenStreamIter,
+    ) -> Option<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        iter.trees.next().map(|tree| match tree {
+            TokenTree::Subtree(group) => bridge::TokenTree::Group(group),
+            TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+                bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+            }
+            TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal),
+            TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
+        })
+    }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+    let kind = match d {
+        bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+        bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+        bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+        bridge::Delimiter::None => return None,
+    };
+    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+    match d.map(|it| it.kind) {
+        Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+        Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+        Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+        None => bridge::Delimiter::None,
+    }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+    match spacing {
+        bridge::Spacing::Alone => Spacing::Alone,
+        bridge::Spacing::Joint => Spacing::Joint,
+    }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+    match spacing {
+        Spacing::Alone => bridge::Spacing::Alone,
+        Spacing::Joint => bridge::Spacing::Joint,
+    }
+}
+
+impl server::Group for RustAnalyzer {
+    fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
+        Self::Group { delimiter: delim_to_internal(delimiter), token_trees: stream.token_trees }
+    }
+    fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
+        delim_to_external(group.delimiter)
+    }
+
+    // NOTE: the returned stream does not include the group's delimiter
+    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
+        TokenStream { token_trees: group.token_trees.clone() }
+    }
+
+    fn span(&mut self, group: &Self::Group) -> Self::Span {
+        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+    }
+
+    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
+        if let Some(delim) = &mut group.delimiter {
+            delim.id = span;
+        }
+    }
+
+    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
+        // FIXME we only store one `TokenId` for the delimiters
+        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+    }
+
+    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
+        // FIXME we only store one `TokenId` for the delimiters
+        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+    }
+}
+
+impl server::Punct for RustAnalyzer {
+    fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
+        tt::Punct {
+            char: ch,
+            spacing: spacing_to_internal(spacing),
+            id: tt::TokenId::unspecified(),
+        }
+    }
+    fn as_char(&mut self, punct: Self::Punct) -> char {
+        punct.char
+    }
+    fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
+        spacing_to_external(punct.spacing)
+    }
+    fn span(&mut self, punct: Self::Punct) -> Self::Span {
+        punct.id
+    }
+    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
+        tt::Punct { id: span, ..punct }
+    }
+}
+
+impl server::Ident for RustAnalyzer {
+    fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+        IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+    }
+
+    fn span(&mut self, ident: Self::Ident) -> Self::Span {
+        self.ident_interner.get(ident.0).0.id
+    }
+    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+        let data = self.ident_interner.get(ident.0);
+        let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+        IdentId(self.ident_interner.intern(&new))
+    }
+}
+
+impl server::Literal for RustAnalyzer {
+    fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+        // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+        // They must still be present to be ABI-compatible and work with upstream proc_macro.
+        "".to_owned()
+    }
+    fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+        Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+    }
+    fn symbol(&mut self, literal: &Self::Literal) -> String {
+        literal.text.to_string()
+    }
+    fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+        None
+    }
+
+    fn to_string(&mut self, literal: &Self::Literal) -> String {
+        literal.to_string()
+    }
+
+    fn integer(&mut self, n: &str) -> Self::Literal {
+        let n = match n.parse::<i128>() {
+            Ok(n) => n.to_string(),
+            Err(_) => n.parse::<u128>().unwrap().to_string(),
+        };
+        Literal { text: n.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+        macro_rules! def_suffixed_integer {
+            ($kind:ident, $($ty:ty),*) => {
+                match $kind {
+                    $(
+                        stringify!($ty) => {
+                            let n: $ty = n.parse().unwrap();
+                            format!(concat!("{}", stringify!($ty)), n)
+                        }
+                    )*
+                    _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+                }
+            }
+        }
+
+        let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn float(&mut self, n: &str) -> Self::Literal {
+        let n: f64 = n.parse().unwrap();
+        let mut text = f64::to_string(&n);
+        if !text.contains('.') {
+            text += ".0"
+        }
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn f32(&mut self, n: &str) -> Self::Literal {
+        let n: f32 = n.parse().unwrap();
+        let text = format!("{}f32", n);
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn f64(&mut self, n: &str) -> Self::Literal {
+        let n: f64 = n.parse().unwrap();
+        let text = format!("{}f64", n);
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn string(&mut self, string: &str) -> Self::Literal {
+        let mut escaped = String::new();
+        for ch in string.chars() {
+            escaped.extend(ch.escape_debug());
+        }
+        Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn character(&mut self, ch: char) -> Self::Literal {
+        Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+        let string = bytes
+            .iter()
+            .cloned()
+            .flat_map(ascii::escape_default)
+            .map(Into::<char>::into)
+            .collect::<String>();
+
+        Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+        literal.id
+    }
+
+    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+        literal.id = span;
+    }
+
+    fn subspan(
+        &mut self,
+        _literal: &Self::Literal,
+        _start: Bound<usize>,
+        _end: Bound<usize>,
+    ) -> Option<Self::Span> {
+        // FIXME handle span
+        None
+    }
+}
+
+impl server::SourceFile for RustAnalyzer {
+    // FIXME these are all stubs
+    fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+        true
+    }
+    fn path(&mut self, _file: &Self::SourceFile) -> String {
+        String::new()
+    }
+    fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+        true
+    }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+        let mut diag = Diagnostic::new(level, msg);
+        diag.spans = spans;
+        diag
+    }
+
+    fn sub(
+        &mut self,
+        _diag: &mut Self::Diagnostic,
+        _level: Level,
+        _msg: &str,
+        _spans: Self::MultiSpan,
+    ) {
+        // FIXME handle diagnostic
+        //
+    }
+
+    fn emit(&mut self, _diag: Self::Diagnostic) {
+        // FIXME handle diagnostic
+        // diag.emit()
+    }
+}
+
+impl server::Span for RustAnalyzer {
+    fn debug(&mut self, span: Self::Span) -> String {
+        format!("{:?}", span.0)
+    }
+    fn def_site(&mut self) -> Self::Span {
+        // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
+        // FIXME handle span
+        tt::TokenId::unspecified()
+    }
+    fn call_site(&mut self) -> Self::Span {
+        // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
+        // FIXME handle span
+        tt::TokenId::unspecified()
+    }
+    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+        SourceFile {}
+    }
+    fn save_span(&mut self, _span: Self::Span) -> usize {
+        // FIXME stub
+        0
+    }
+    fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+        // FIXME stub
+        tt::TokenId::unspecified()
+    }
+    /// Recent feature, not yet in the proc_macro
+    ///
+    /// See PR:
+    /// https://github.com/rust-lang/rust/pull/55780
+    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+        None
+    }
+
+    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+        // FIXME handle span
+        None
+    }
+    fn source(&mut self, span: Self::Span) -> Self::Span {
+        // FIXME handle span
+        span
+    }
+    fn start(&mut self, _span: Self::Span) -> LineColumn {
+        // FIXME handle span
+        LineColumn { line: 0, column: 0 }
+    }
+    fn end(&mut self, _span: Self::Span) -> LineColumn {
+        // FIXME handle span
+        LineColumn { line: 0, column: 0 }
+    }
+    fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+        // Just return the first span again, because some macros will unwrap the result.
+        Some(first)
+    }
+    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+        // FIXME handle span
+        tt::TokenId::unspecified()
+    }
+
+    fn mixed_site(&mut self) -> Self::Span {
+        // FIXME handle span
+        tt::TokenId::unspecified()
+    }
+
+    fn after(&mut self, _self_: Self::Span) -> Self::Span {
+        tt::TokenId::unspecified()
+    }
+
+    fn before(&mut self, _self_: Self::Span) -> Self::Span {
+        tt::TokenId::unspecified()
+    }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+    fn new(&mut self) -> Self::MultiSpan {
+        // FIXME handle span
+        vec![]
+    }
+
+    fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+        // TODO
+        other.push(span)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::super::proc_macro::bridge::server::Literal;
+    use super::*;
+
+    #[test]
+    fn test_ra_server_literals() {
+        let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+        assert_eq!(srv.integer("1234").text, "1234");
+
+        assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+        assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+        assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+        assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+        assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+        assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+        assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+        assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+        assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+        assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+        assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+        assert_eq!(srv.float("0").text, "0.0");
+        assert_eq!(srv.float("15684.5867").text, "15684.5867");
+        assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+        assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+        assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+        assert_eq!(srv.character('c').text, "'c'");
+        assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+        // u128::max
+        assert_eq!(
+            srv.integer("340282366920938463463374607431768211455").text,
+            "340282366920938463463374607431768211455"
+        );
+        // i128::min
+        assert_eq!(
+            srv.integer("-170141183460469231731687303715884105728").text,
+            "-170141183460469231731687303715884105728"
+        );
+    }
+
+    #[test]
+    fn test_ra_server_to_string() {
+        let s = TokenStream {
+            token_trees: vec![
+                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                    text: "struct".into(),
+                    id: tt::TokenId::unspecified(),
+                })),
+                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                    text: "T".into(),
+                    id: tt::TokenId::unspecified(),
+                })),
+                tt::TokenTree::Subtree(tt::Subtree {
+                    delimiter: Some(tt::Delimiter {
+                        id: tt::TokenId::unspecified(),
+                        kind: tt::DelimiterKind::Brace,
+                    }),
+                    token_trees: vec![],
+                }),
+            ],
+        };
+
+        assert_eq!(s.to_string(), "struct T {}");
+    }
+
+    #[test]
+    fn test_ra_server_from_str() {
+        use std::str::FromStr;
+        let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+            delimiter: Some(tt::Delimiter {
+                id: tt::TokenId::unspecified(),
+                kind: tt::DelimiterKind::Parenthesis,
+            }),
+            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                text: "a".into(),
+                id: tt::TokenId::unspecified(),
+            }))],
+        });
+
+        let t1 = TokenStream::from_str("(a)").unwrap();
+        assert_eq!(t1.token_trees.len(), 1);
+        assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+        let t2 = TokenStream::from_str("(a);").unwrap();
+        assert_eq!(t2.token_trees.len(), 2);
+        assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+        let underscore = TokenStream::from_str("_").unwrap();
+        assert_eq!(
+            underscore.token_trees[0],
+            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                text: "_".into(),
+                id: tt::TokenId::unspecified(),
+            }))
+        );
+    }
+}
diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/rustc_server.rs b/crates/proc-macro-srv/src/abis/abi_1_58/rustc_server.rs
deleted file mode 100644 (file)
index 5c596bc..0000000
+++ /dev/null
@@ -1,819 +0,0 @@
-//! Rustc proc-macro server implementation with tt
-//!
-//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
-//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
-//! we could provide any TokenStream implementation.
-//! The original idea from fedochet is using proc-macro2 as backend,
-//! we use tt instead for better integration with RA.
-//!
-//! FIXME: No span and source file information is implemented yet
-
-use super::proc_macro::bridge::{self, server};
-
-use std::collections::HashMap;
-use std::hash::Hash;
-use std::iter::FromIterator;
-use std::ops::Bound;
-use std::{ascii, vec::IntoIter};
-
-type Group = tt::Subtree;
-type TokenTree = tt::TokenTree;
-type Punct = tt::Punct;
-type Spacing = tt::Spacing;
-type Literal = tt::Literal;
-type Span = tt::TokenId;
-
-#[derive(Debug, Clone)]
-pub struct TokenStream {
-    pub token_trees: Vec<TokenTree>,
-}
-
-impl TokenStream {
-    pub fn new() -> Self {
-        TokenStream { token_trees: Default::default() }
-    }
-
-    pub fn with_subtree(subtree: tt::Subtree) -> Self {
-        if subtree.delimiter.is_some() {
-            TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
-        } else {
-            TokenStream { token_trees: subtree.token_trees }
-        }
-    }
-
-    pub fn into_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: None, token_trees: self.token_trees }
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.token_trees.is_empty()
-    }
-}
-
-/// Creates a token stream containing a single token tree.
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream { token_trees: vec![tree] }
-    }
-}
-
-/// Collects a number of token trees into a single stream.
-impl FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
-        trees.into_iter().map(TokenStream::from).collect()
-    }
-}
-
-/// A "flattening" operation on token streams, collects token trees
-/// from multiple token streams into a single stream.
-impl FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let mut builder = TokenStreamBuilder::new();
-        streams.into_iter().for_each(|stream| builder.push(stream));
-        builder.build()
-    }
-}
-
-impl Extend<TokenTree> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
-        self.extend(trees.into_iter().map(TokenStream::from));
-    }
-}
-
-impl Extend<TokenStream> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-        for item in streams {
-            for tkn in item {
-                match tkn {
-                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
-                        self.token_trees.extend(subtree.token_trees);
-                    }
-                    _ => {
-                        self.token_trees.push(tkn);
-                    }
-                }
-            }
-        }
-    }
-}
-
-#[derive(Clone)]
-pub struct SourceFile {
-    // FIXME stub
-}
-
-type Level = super::proc_macro::Level;
-type LineColumn = super::proc_macro::LineColumn;
-
-/// A structure representing a diagnostic message and associated children
-/// messages.
-#[derive(Clone, Debug)]
-pub struct Diagnostic {
-    level: Level,
-    message: String,
-    spans: Vec<Span>,
-    children: Vec<Diagnostic>,
-}
-
-impl Diagnostic {
-    /// Creates a new diagnostic with the given `level` and `message`.
-    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
-        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
-    }
-}
-
-// Rustc Server Ident has to be `Copyable`
-// We use a stub here for bypassing
-#[derive(Hash, Eq, PartialEq, Copy, Clone)]
-pub struct IdentId(u32);
-
-#[derive(Clone, Hash, Eq, PartialEq)]
-struct IdentData(tt::Ident);
-
-#[derive(Default)]
-struct IdentInterner {
-    idents: HashMap<IdentData, u32>,
-    ident_data: Vec<IdentData>,
-}
-
-impl IdentInterner {
-    fn intern(&mut self, data: &IdentData) -> u32 {
-        if let Some(index) = self.idents.get(data) {
-            return *index;
-        }
-
-        let index = self.idents.len() as u32;
-        self.ident_data.push(data.clone());
-        self.idents.insert(data.clone(), index);
-        index
-    }
-
-    fn get(&self, index: u32) -> &IdentData {
-        &self.ident_data[index as usize]
-    }
-
-    #[allow(unused)]
-    fn get_mut(&mut self, index: u32) -> &mut IdentData {
-        self.ident_data.get_mut(index as usize).expect("Should be consistent")
-    }
-}
-
-pub struct TokenStreamBuilder {
-    acc: TokenStream,
-}
-
-/// Public implementation details for the `TokenStream` type, such as iterators.
-pub mod token_stream {
-    use std::str::FromStr;
-
-    use super::{TokenStream, TokenTree};
-
-    /// An iterator over `TokenStream`'s `TokenTree`s.
-    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
-    /// and returns whole groups as token trees.
-    impl IntoIterator for TokenStream {
-        type Item = TokenTree;
-        type IntoIter = super::IntoIter<TokenTree>;
-
-        fn into_iter(self) -> Self::IntoIter {
-            self.token_trees.into_iter()
-        }
-    }
-
-    type LexError = String;
-
-    /// Attempts to break the string into tokens and parse those tokens into a token stream.
-    /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
-    /// or characters not existing in the language.
-    /// All tokens in the parsed stream get `Span::call_site()` spans.
-    ///
-    /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
-    /// change these errors into `LexError`s later.
-    impl FromStr for TokenStream {
-        type Err = LexError;
-
-        fn from_str(src: &str) -> Result<TokenStream, LexError> {
-            let (subtree, _token_map) =
-                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
-
-            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
-            Ok(TokenStream::with_subtree(subtree))
-        }
-    }
-
-    impl ToString for TokenStream {
-        fn to_string(&self) -> String {
-            tt::pretty(&self.token_trees)
-        }
-    }
-
-    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
-        tt::Subtree {
-            delimiter: subtree
-                .delimiter
-                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
-            token_trees: subtree
-                .token_trees
-                .into_iter()
-                .map(token_tree_replace_token_ids_with_unspecified)
-                .collect(),
-        }
-    }
-
-    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
-        match tt {
-            tt::TokenTree::Leaf(leaf) => {
-                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
-            }
-            tt::TokenTree::Subtree(subtree) => {
-                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
-            }
-        }
-    }
-
-    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
-        match leaf {
-            tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
-            }
-            tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
-            }
-            tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
-            }
-        }
-    }
-}
-
-impl TokenStreamBuilder {
-    fn new() -> TokenStreamBuilder {
-        TokenStreamBuilder { acc: TokenStream::new() }
-    }
-
-    fn push(&mut self, stream: TokenStream) {
-        self.acc.extend(stream.into_iter())
-    }
-
-    fn build(self) -> TokenStream {
-        self.acc
-    }
-}
-
-pub struct FreeFunctions;
-
-#[derive(Clone)]
-pub struct TokenStreamIter {
-    trees: IntoIter<TokenTree>,
-}
-
-#[derive(Default)]
-pub struct Rustc {
-    ident_interner: IdentInterner,
-    // FIXME: store span information here.
-}
-
-impl server::Types for Rustc {
-    type FreeFunctions = FreeFunctions;
-    type TokenStream = TokenStream;
-    type TokenStreamBuilder = TokenStreamBuilder;
-    type TokenStreamIter = TokenStreamIter;
-    type Group = Group;
-    type Punct = Punct;
-    type Ident = IdentId;
-    type Literal = Literal;
-    type SourceFile = SourceFile;
-    type Diagnostic = Diagnostic;
-    type Span = Span;
-    type MultiSpan = Vec<Span>;
-}
-
-impl server::FreeFunctions for Rustc {
-    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
-        // FIXME: track env var accesses
-        // https://github.com/rust-lang/rust/pull/71858
-    }
-    fn track_path(&mut self, _path: &str) {}
-}
-
-impl server::TokenStream for Rustc {
-    fn new(&mut self) -> Self::TokenStream {
-        Self::TokenStream::new()
-    }
-
-    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
-        stream.is_empty()
-    }
-    fn from_str(&mut self, src: &str) -> Self::TokenStream {
-        use std::str::FromStr;
-
-        Self::TokenStream::from_str(src).expect("cannot parse string")
-    }
-    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
-        stream.to_string()
-    }
-    fn from_token_tree(
-        &mut self,
-        tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
-    ) -> Self::TokenStream {
-        match tree {
-            bridge::TokenTree::Group(group) => {
-                let tree = TokenTree::from(group);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Ident(IdentId(index)) => {
-                let IdentData(ident) = self.ident_interner.get(index).clone();
-                let ident: tt::Ident = ident;
-                let leaf = tt::Leaf::from(ident);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Literal(literal) => {
-                let leaf = tt::Leaf::from(literal);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Punct(p) => {
-                let leaf = tt::Leaf::from(p);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-        }
-    }
-
-    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
-        let trees: Vec<TokenTree> = stream.into_iter().collect();
-        TokenStreamIter { trees: trees.into_iter() }
-    }
-
-    fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
-        Ok(self_.clone())
-    }
-}
-
-impl server::TokenStreamBuilder for Rustc {
-    fn new(&mut self) -> Self::TokenStreamBuilder {
-        Self::TokenStreamBuilder::new()
-    }
-    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
-        builder.push(stream)
-    }
-    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
-        builder.build()
-    }
-}
-
-impl server::TokenStreamIter for Rustc {
-    fn next(
-        &mut self,
-        iter: &mut Self::TokenStreamIter,
-    ) -> Option<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
-        iter.trees.next().map(|tree| match tree {
-            TokenTree::Subtree(group) => bridge::TokenTree::Group(group),
-            TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
-                bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
-            }
-            TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal),
-            TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
-        })
-    }
-}
-
-fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
-    let kind = match d {
-        bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
-        bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
-        bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
-        bridge::Delimiter::None => return None,
-    };
-    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
-}
-
-fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
-    match d.map(|it| it.kind) {
-        Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
-        Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
-        Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
-        None => bridge::Delimiter::None,
-    }
-}
-
-fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
-    match spacing {
-        bridge::Spacing::Alone => Spacing::Alone,
-        bridge::Spacing::Joint => Spacing::Joint,
-    }
-}
-
-fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
-    match spacing {
-        Spacing::Alone => bridge::Spacing::Alone,
-        Spacing::Joint => bridge::Spacing::Joint,
-    }
-}
-
-impl server::Group for Rustc {
-    fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
-        Self::Group { delimiter: delim_to_internal(delimiter), token_trees: stream.token_trees }
-    }
-    fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
-        delim_to_external(group.delimiter)
-    }
-
-    // NOTE: Return value of do not include delimiter
-    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
-        TokenStream { token_trees: group.token_trees.clone() }
-    }
-
-    fn span(&mut self, group: &Self::Group) -> Self::Span {
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
-    }
-
-    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
-        if let Some(delim) = &mut group.delimiter {
-            delim.id = span;
-        }
-    }
-
-    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
-        // FIXME we only store one `TokenId` for the delimiters
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
-    }
-
-    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
-        // FIXME we only store one `TokenId` for the delimiters
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
-    }
-}
-
-impl server::Punct for Rustc {
-    fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
-        tt::Punct {
-            char: ch,
-            spacing: spacing_to_internal(spacing),
-            id: tt::TokenId::unspecified(),
-        }
-    }
-    fn as_char(&mut self, punct: Self::Punct) -> char {
-        punct.char
-    }
-    fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
-        spacing_to_external(punct.spacing)
-    }
-    fn span(&mut self, punct: Self::Punct) -> Self::Span {
-        punct.id
-    }
-    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
-        tt::Punct { id: span, ..punct }
-    }
-}
-
-impl server::Ident for Rustc {
-    fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
-        IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
-    }
-
-    fn span(&mut self, ident: Self::Ident) -> Self::Span {
-        self.ident_interner.get(ident.0).0.id
-    }
-    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
-        let data = self.ident_interner.get(ident.0);
-        let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
-        IdentId(self.ident_interner.intern(&new))
-    }
-}
-
-impl server::Literal for Rustc {
-    fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
-        // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
-        // They must still be present to be ABI-compatible and work with upstream proc_macro.
-        "".to_owned()
-    }
-    fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
-        Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
-    }
-    fn symbol(&mut self, literal: &Self::Literal) -> String {
-        literal.text.to_string()
-    }
-    fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
-        None
-    }
-
-    fn to_string(&mut self, literal: &Self::Literal) -> String {
-        literal.to_string()
-    }
-
-    fn integer(&mut self, n: &str) -> Self::Literal {
-        let n = match n.parse::<i128>() {
-            Ok(n) => n.to_string(),
-            Err(_) => n.parse::<u128>().unwrap().to_string(),
-        };
-        Literal { text: n.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
-        macro_rules! def_suffixed_integer {
-            ($kind:ident, $($ty:ty),*) => {
-                match $kind {
-                    $(
-                        stringify!($ty) => {
-                            let n: $ty = n.parse().unwrap();
-                            format!(concat!("{}", stringify!($ty)), n)
-                        }
-                    )*
-                    _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
-                }
-            }
-        }
-
-        let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
-
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn float(&mut self, n: &str) -> Self::Literal {
-        let n: f64 = n.parse().unwrap();
-        let mut text = f64::to_string(&n);
-        if !text.contains('.') {
-            text += ".0"
-        }
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn f32(&mut self, n: &str) -> Self::Literal {
-        let n: f32 = n.parse().unwrap();
-        let text = format!("{}f32", n);
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn f64(&mut self, n: &str) -> Self::Literal {
-        let n: f64 = n.parse().unwrap();
-        let text = format!("{}f64", n);
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn string(&mut self, string: &str) -> Self::Literal {
-        let mut escaped = String::new();
-        for ch in string.chars() {
-            escaped.extend(ch.escape_debug());
-        }
-        Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn character(&mut self, ch: char) -> Self::Literal {
-        Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
-        let string = bytes
-            .iter()
-            .cloned()
-            .flat_map(ascii::escape_default)
-            .map(Into::<char>::into)
-            .collect::<String>();
-
-        Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
-        literal.id
-    }
-
-    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
-        literal.id = span;
-    }
-
-    fn subspan(
-        &mut self,
-        _literal: &Self::Literal,
-        _start: Bound<usize>,
-        _end: Bound<usize>,
-    ) -> Option<Self::Span> {
-        // FIXME handle span
-        None
-    }
-}
-
-impl server::SourceFile for Rustc {
-    // FIXME these are all stubs
-    fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
-        true
-    }
-    fn path(&mut self, _file: &Self::SourceFile) -> String {
-        String::new()
-    }
-    fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
-        true
-    }
-}
-
-impl server::Diagnostic for Rustc {
-    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
-        let mut diag = Diagnostic::new(level, msg);
-        diag.spans = spans;
-        diag
-    }
-
-    fn sub(
-        &mut self,
-        _diag: &mut Self::Diagnostic,
-        _level: Level,
-        _msg: &str,
-        _spans: Self::MultiSpan,
-    ) {
-        // FIXME handle diagnostic
-        //
-    }
-
-    fn emit(&mut self, _diag: Self::Diagnostic) {
-        // FIXME handle diagnostic
-        // diag.emit()
-    }
-}
-
-impl server::Span for Rustc {
-    fn debug(&mut self, span: Self::Span) -> String {
-        format!("{:?}", span.0)
-    }
-    fn def_site(&mut self) -> Self::Span {
-        // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-    fn call_site(&mut self) -> Self::Span {
-        // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
-        SourceFile {}
-    }
-    fn save_span(&mut self, _span: Self::Span) -> usize {
-        // FIXME stub
-        0
-    }
-    fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
-        // FIXME stub
-        tt::TokenId::unspecified()
-    }
-    /// Recent feature, not yet in the proc_macro
-    ///
-    /// See PR:
-    /// https://github.com/rust-lang/rust/pull/55780
-    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
-        None
-    }
-
-    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
-        // FIXME handle span
-        None
-    }
-    fn source(&mut self, span: Self::Span) -> Self::Span {
-        // FIXME handle span
-        span
-    }
-    fn start(&mut self, _span: Self::Span) -> LineColumn {
-        // FIXME handle span
-        LineColumn { line: 0, column: 0 }
-    }
-    fn end(&mut self, _span: Self::Span) -> LineColumn {
-        // FIXME handle span
-        LineColumn { line: 0, column: 0 }
-    }
-    fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
-        // Just return the first span again, because some macros will unwrap the result.
-        Some(first)
-    }
-    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-
-    fn mixed_site(&mut self) -> Self::Span {
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-
-    fn after(&mut self, _self_: Self::Span) -> Self::Span {
-        tt::TokenId::unspecified()
-    }
-
-    fn before(&mut self, _self_: Self::Span) -> Self::Span {
-        tt::TokenId::unspecified()
-    }
-}
-
-impl server::MultiSpan for Rustc {
-    fn new(&mut self) -> Self::MultiSpan {
-        // FIXME handle span
-        vec![]
-    }
-
-    fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
-        //TODP
-        other.push(span)
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::super::proc_macro::bridge::server::Literal;
-    use super::*;
-
-    #[test]
-    fn test_rustc_server_literals() {
-        let mut srv = Rustc { ident_interner: IdentInterner::default() };
-        assert_eq!(srv.integer("1234").text, "1234");
-
-        assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
-        assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
-        assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
-        assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
-        assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
-        assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
-        assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
-        assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
-        assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
-        assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
-        assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
-        assert_eq!(srv.float("0").text, "0.0");
-        assert_eq!(srv.float("15684.5867").text, "15684.5867");
-        assert_eq!(srv.f32("15684.58").text, "15684.58f32");
-        assert_eq!(srv.f64("15684.58").text, "15684.58f64");
-
-        assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
-        assert_eq!(srv.character('c').text, "'c'");
-        assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
-
-        // u128::max
-        assert_eq!(
-            srv.integer("340282366920938463463374607431768211455").text,
-            "340282366920938463463374607431768211455"
-        );
-        // i128::min
-        assert_eq!(
-            srv.integer("-170141183460469231731687303715884105728").text,
-            "-170141183460469231731687303715884105728"
-        );
-    }
-
-    #[test]
-    fn test_rustc_server_to_string() {
-        let s = TokenStream {
-            token_trees: vec![
-                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                    text: "struct".into(),
-                    id: tt::TokenId::unspecified(),
-                })),
-                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                    text: "T".into(),
-                    id: tt::TokenId::unspecified(),
-                })),
-                tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: Some(tt::Delimiter {
-                        id: tt::TokenId::unspecified(),
-                        kind: tt::DelimiterKind::Brace,
-                    }),
-                    token_trees: vec![],
-                }),
-            ],
-        };
-
-        assert_eq!(s.to_string(), "struct T {}");
-    }
-
-    #[test]
-    fn test_rustc_server_from_str() {
-        use std::str::FromStr;
-        let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
-            delimiter: Some(tt::Delimiter {
-                id: tt::TokenId::unspecified(),
-                kind: tt::DelimiterKind::Parenthesis,
-            }),
-            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                text: "a".into(),
-                id: tt::TokenId::unspecified(),
-            }))],
-        });
-
-        let t1 = TokenStream::from_str("(a)").unwrap();
-        assert_eq!(t1.token_trees.len(), 1);
-        assert_eq!(t1.token_trees[0], subtree_paren_a);
-
-        let t2 = TokenStream::from_str("(a);").unwrap();
-        assert_eq!(t2.token_trees.len(), 2);
-        assert_eq!(t2.token_trees[0], subtree_paren_a);
-
-        let underscore = TokenStream::from_str("_").unwrap();
-        assert_eq!(
-            underscore.token_trees[0],
-            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                text: "_".into(),
-                id: tt::TokenId::unspecified(),
-            }))
-        );
-    }
-}
index c253b10994dd61ff736a5c3543c6ecc8a537057f..76e89e3191a67230c3e9c34e67d72d7208dfdd92 100644 (file)
@@ -6,14 +6,14 @@
 
 #[allow(dead_code)]
 #[doc(hidden)]
-mod rustc_server;
+mod ra_server;
 
 use libloading::Library;
 use proc_macro_api::ProcMacroKind;
 
 use super::PanicMessage;
 
-pub use rustc_server::TokenStream;
+pub use ra_server::TokenStream;
 
 pub(crate) struct Abi {
     exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
@@ -50,7 +50,7 @@ pub fn expand(
                 } if *trait_name == macro_name => {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        rustc_server::Rustc::default(),
+                        ra_server::RustAnalyzer::default(),
                         parsed_body,
                         true,
                     );
@@ -61,7 +61,7 @@ pub fn expand(
                 {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        rustc_server::Rustc::default(),
+                        ra_server::RustAnalyzer::default(),
                         parsed_body,
                         true,
                     );
@@ -72,7 +72,7 @@ pub fn expand(
                 {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        rustc_server::Rustc::default(),
+                        ra_server::RustAnalyzer::default(),
                         parsed_attributes,
                         parsed_body,
                         true,
diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs
new file mode 100644 (file)
index 0000000..05a565f
--- /dev/null
@@ -0,0 +1,834 @@
+//! RustAnalyzer proc-macro server implementation with tt
+//!
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
+//! we could provide any TokenStream implementation.
+//! The original idea from fedochet is using proc-macro2 as backend,
+//! we use tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+    pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+    pub fn new() -> Self {
+        TokenStream::default()
+    }
+
+    pub fn with_subtree(subtree: tt::Subtree) -> Self {
+        if subtree.delimiter.is_some() {
+            TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+        } else {
+            TokenStream { token_trees: subtree.token_trees }
+        }
+    }
+
+    pub fn into_subtree(self) -> tt::Subtree {
+        tt::Subtree { delimiter: None, token_trees: self.token_trees }
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.token_trees.is_empty()
+    }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+    fn from(tree: TokenTree) -> TokenStream {
+        TokenStream { token_trees: vec![tree] }
+    }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+        trees.into_iter().map(TokenStream::from).collect()
+    }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+        let mut builder = TokenStreamBuilder::new();
+        streams.into_iter().for_each(|stream| builder.push(stream));
+        builder.build()
+    }
+}
+
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+        self.extend(trees.into_iter().map(TokenStream::from));
+    }
+}
+
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+        for item in streams {
+            for tkn in item {
+                match tkn {
+                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+                        self.token_trees.extend(subtree.token_trees);
+                    }
+                    _ => {
+                        self.token_trees.push(tkn);
+                    }
+                }
+            }
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct SourceFile {
+    // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+    level: Level,
+    message: String,
+    spans: Vec<Span>,
+    children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+    /// Creates a new diagnostic with the given `level` and `message`.
+    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+    }
+}
+
+// Rustc Server Ident has to be `Copyable`
+// We use a stub here for bypassing
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+    idents: HashMap<IdentData, u32>,
+    ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+    fn intern(&mut self, data: &IdentData) -> u32 {
+        if let Some(index) = self.idents.get(data) {
+            return *index;
+        }
+
+        let index = self.idents.len() as u32;
+        self.ident_data.push(data.clone());
+        self.idents.insert(data.clone(), index);
+        index
+    }
+
+    fn get(&self, index: u32) -> &IdentData {
+        &self.ident_data[index as usize]
+    }
+
+    #[allow(unused)]
+    fn get_mut(&mut self, index: u32) -> &mut IdentData {
+        self.ident_data.get_mut(index as usize).expect("Should be consistent")
+    }
+}
+
+pub struct TokenStreamBuilder {
+    acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+    use std::str::FromStr;
+
+    use super::{TokenStream, TokenTree};
+
+    /// An iterator over `TokenStream`'s `TokenTree`s.
+    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+    /// and returns whole groups as token trees.
+    impl IntoIterator for TokenStream {
+        type Item = TokenTree;
+        type IntoIter = super::IntoIter<TokenTree>;
+
+        fn into_iter(self) -> Self::IntoIter {
+            self.token_trees.into_iter()
+        }
+    }
+
+    type LexError = String;
+
+    /// Attempts to break the string into tokens and parse those tokens into a token stream.
+    /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+    /// or characters not existing in the language.
+    /// All tokens in the parsed stream get `Span::call_site()` spans.
+    ///
+    /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+    /// change these errors into `LexError`s later.
+    impl FromStr for TokenStream {
+        type Err = LexError;
+
+        fn from_str(src: &str) -> Result<TokenStream, LexError> {
+            let (subtree, _token_map) =
+                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+            Ok(TokenStream::with_subtree(subtree))
+        }
+    }
+
+    impl ToString for TokenStream {
+        fn to_string(&self) -> String {
+            tt::pretty(&self.token_trees)
+        }
+    }
+
+    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+        tt::Subtree {
+            delimiter: subtree
+                .delimiter
+                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+            token_trees: subtree
+                .token_trees
+                .into_iter()
+                .map(token_tree_replace_token_ids_with_unspecified)
+                .collect(),
+        }
+    }
+
+    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+        match tt {
+            tt::TokenTree::Leaf(leaf) => {
+                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+            }
+            tt::TokenTree::Subtree(subtree) => {
+                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+            }
+        }
+    }
+
+    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+        match leaf {
+            tt::Leaf::Literal(lit) => {
+                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+            }
+            tt::Leaf::Punct(punct) => {
+                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+            }
+            tt::Leaf::Ident(ident) => {
+                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+            }
+        }
+    }
+}
+
+impl TokenStreamBuilder {
+    fn new() -> TokenStreamBuilder {
+        TokenStreamBuilder { acc: TokenStream::new() }
+    }
+
+    fn push(&mut self, stream: TokenStream) {
+        self.acc.extend(stream.into_iter())
+    }
+
+    fn build(self) -> TokenStream {
+        self.acc
+    }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+    trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+    ident_interner: IdentInterner,
+    // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+    type FreeFunctions = FreeFunctions;
+    type TokenStream = TokenStream;
+    type Group = Group;
+    type Punct = Punct;
+    type Ident = IdentId;
+    type Literal = Literal;
+    type SourceFile = SourceFile;
+    type Diagnostic = Diagnostic;
+    type Span = Span;
+    type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+        // FIXME: track env var accesses
+        // https://github.com/rust-lang/rust/pull/71858
+    }
+    fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+        stream.is_empty()
+    }
+    fn from_str(&mut self, src: &str) -> Self::TokenStream {
+        use std::str::FromStr;
+
+        Self::TokenStream::from_str(src).expect("cannot parse string")
+    }
+    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+        stream.to_string()
+    }
+    fn from_token_tree(
+        &mut self,
+        tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+    ) -> Self::TokenStream {
+        match tree {
+            bridge::TokenTree::Group(group) => {
+                let tree = TokenTree::from(group);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Ident(IdentId(index)) => {
+                let IdentData(ident) = self.ident_interner.get(index).clone();
+                let ident: tt::Ident = ident;
+                let leaf = tt::Leaf::from(ident);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Literal(literal) => {
+                let leaf = tt::Leaf::from(literal);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Punct(p) => {
+                let leaf = tt::Leaf::from(p);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+        }
+    }
+
+    fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+        Ok(self_.clone())
+    }
+
+    fn concat_trees(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        trees: Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
+    ) -> Self::TokenStream {
+        let mut builder = TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for tree in trees {
+            builder.push(self.from_token_tree(tree));
+        }
+        builder.build()
+    }
+
+    fn concat_streams(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        streams: Vec<Self::TokenStream>,
+    ) -> Self::TokenStream {
+        let mut builder = TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for stream in streams {
+            builder.push(stream);
+        }
+        builder.build()
+    }
+
+    fn into_trees(
+        &mut self,
+        stream: Self::TokenStream,
+    ) -> Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        stream
+            .into_iter()
+            .map(|tree| match tree {
+                tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+                    bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+                }
+                tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
+                tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
+                tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(subtree),
+            })
+            .collect()
+    }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+    let kind = match d {
+        bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+        bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+        bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+        bridge::Delimiter::None => return None,
+    };
+    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+    match d.map(|it| it.kind) {
+        Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+        Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+        Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+        None => bridge::Delimiter::None,
+    }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+    match spacing {
+        bridge::Spacing::Alone => Spacing::Alone,
+        bridge::Spacing::Joint => Spacing::Joint,
+    }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+    match spacing {
+        Spacing::Alone => bridge::Spacing::Alone,
+        Spacing::Joint => bridge::Spacing::Joint,
+    }
+}
+
+impl server::Group for RustAnalyzer {
+    fn new(
+        &mut self,
+        delimiter: bridge::Delimiter,
+        stream: Option<Self::TokenStream>,
+    ) -> Self::Group {
+        Self::Group {
+            delimiter: delim_to_internal(delimiter),
+            token_trees: stream.unwrap_or_default().token_trees,
+        }
+    }
+    fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
+        delim_to_external(group.delimiter)
+    }
+
+    // NOTE: the returned stream does not include the delimiter itself
+    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
+        TokenStream { token_trees: group.token_trees.clone() }
+    }
+
+    fn span(&mut self, group: &Self::Group) -> Self::Span {
+        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+    }
+
+    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
+        if let Some(delim) = &mut group.delimiter {
+            delim.id = span;
+        }
+    }
+
+    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
+        // FIXME we only store one `TokenId` for the delimiters
+        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+    }
+
+    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
+        // FIXME we only store one `TokenId` for the delimiters
+        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+    }
+}
+
+impl server::Punct for RustAnalyzer {
+    fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
+        tt::Punct {
+            char: ch,
+            spacing: spacing_to_internal(spacing),
+            id: tt::TokenId::unspecified(),
+        }
+    }
+    fn as_char(&mut self, punct: Self::Punct) -> char {
+        punct.char
+    }
+    fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
+        spacing_to_external(punct.spacing)
+    }
+    fn span(&mut self, punct: Self::Punct) -> Self::Span {
+        punct.id
+    }
+    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
+        tt::Punct { id: span, ..punct }
+    }
+}
+
+impl server::Ident for RustAnalyzer {
+    fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+        IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+    }
+
+    fn span(&mut self, ident: Self::Ident) -> Self::Span {
+        self.ident_interner.get(ident.0).0.id
+    }
+    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+        let data = self.ident_interner.get(ident.0);
+        let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+        IdentId(self.ident_interner.intern(&new))
+    }
+}
+
+impl server::Literal for RustAnalyzer {
+    fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+        // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+        // They must still be present to be ABI-compatible and work with upstream proc_macro.
+        "".to_owned()
+    }
+    fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+        Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+    }
+    fn symbol(&mut self, literal: &Self::Literal) -> String {
+        literal.text.to_string()
+    }
+    fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+        None
+    }
+
+    fn to_string(&mut self, literal: &Self::Literal) -> String {
+        literal.to_string()
+    }
+
+    fn integer(&mut self, n: &str) -> Self::Literal {
+        let n = match n.parse::<i128>() {
+            Ok(n) => n.to_string(),
+            Err(_) => n.parse::<u128>().unwrap().to_string(),
+        };
+        Literal { text: n.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+        macro_rules! def_suffixed_integer {
+            ($kind:ident, $($ty:ty),*) => {
+                match $kind {
+                    $(
+                        stringify!($ty) => {
+                            let n: $ty = n.parse().unwrap();
+                            format!(concat!("{}", stringify!($ty)), n)
+                        }
+                    )*
+                    _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+                }
+            }
+        }
+
+        let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn float(&mut self, n: &str) -> Self::Literal {
+        let n: f64 = n.parse().unwrap();
+        let mut text = f64::to_string(&n);
+        if !text.contains('.') {
+            text += ".0"
+        }
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn f32(&mut self, n: &str) -> Self::Literal {
+        let n: f32 = n.parse().unwrap();
+        let text = format!("{}f32", n);
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn f64(&mut self, n: &str) -> Self::Literal {
+        let n: f64 = n.parse().unwrap();
+        let text = format!("{}f64", n);
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn string(&mut self, string: &str) -> Self::Literal {
+        let mut escaped = String::new();
+        for ch in string.chars() {
+            escaped.extend(ch.escape_debug());
+        }
+        Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn character(&mut self, ch: char) -> Self::Literal {
+        Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+        let string = bytes
+            .iter()
+            .cloned()
+            .flat_map(ascii::escape_default)
+            .map(Into::<char>::into)
+            .collect::<String>();
+
+        Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+        literal.id
+    }
+
+    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+        literal.id = span;
+    }
+
+    fn subspan(
+        &mut self,
+        _literal: &Self::Literal,
+        _start: Bound<usize>,
+        _end: Bound<usize>,
+    ) -> Option<Self::Span> {
+        // FIXME handle span
+        None
+    }
+}
+
+impl server::SourceFile for RustAnalyzer {
+    // FIXME these are all stubs
+    fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+        true
+    }
+    fn path(&mut self, _file: &Self::SourceFile) -> String {
+        String::new()
+    }
+    fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+        true
+    }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+        let mut diag = Diagnostic::new(level, msg);
+        diag.spans = spans;
+        diag
+    }
+
+    fn sub(
+        &mut self,
+        _diag: &mut Self::Diagnostic,
+        _level: Level,
+        _msg: &str,
+        _spans: Self::MultiSpan,
+    ) {
+        // FIXME handle diagnostic
+        //
+    }
+
+    fn emit(&mut self, _diag: Self::Diagnostic) {
+        // FIXME handle diagnostic
+        // diag.emit()
+    }
+}
+
+impl server::Span for RustAnalyzer {
+    fn debug(&mut self, span: Self::Span) -> String {
+        format!("{:?}", span.0)
+    }
+    fn def_site(&mut self) -> Self::Span {
+        // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
+        // FIXME handle span
+        tt::TokenId::unspecified()
+    }
+    fn call_site(&mut self) -> Self::Span {
+        // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
+        // FIXME handle span
+        tt::TokenId::unspecified()
+    }
+    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+        SourceFile {}
+    }
+    fn save_span(&mut self, _span: Self::Span) -> usize {
+        // FIXME stub
+        0
+    }
+    fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+        // FIXME stub
+        tt::TokenId::unspecified()
+    }
+    /// Recent feature, not yet in the proc_macro
+    ///
+    /// See PR:
+    /// <https://github.com/rust-lang/rust/pull/55780>
+    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+        None
+    }
+
+    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+        // FIXME handle span
+        None
+    }
+    fn source(&mut self, span: Self::Span) -> Self::Span {
+        // FIXME handle span
+        span
+    }
+    fn start(&mut self, _span: Self::Span) -> LineColumn {
+        // FIXME handle span
+        LineColumn { line: 0, column: 0 }
+    }
+    fn end(&mut self, _span: Self::Span) -> LineColumn {
+        // FIXME handle span
+        LineColumn { line: 0, column: 0 }
+    }
+    fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+        // Just return the first span again, because some macros will unwrap the result.
+        Some(first)
+    }
+    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+        // FIXME handle span
+        tt::TokenId::unspecified()
+    }
+
+    fn mixed_site(&mut self) -> Self::Span {
+        // FIXME handle span
+        tt::TokenId::unspecified()
+    }
+
+    fn after(&mut self, _self_: Self::Span) -> Self::Span {
+        tt::TokenId::unspecified()
+    }
+
+    fn before(&mut self, _self_: Self::Span) -> Self::Span {
+        tt::TokenId::unspecified()
+    }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+    fn new(&mut self) -> Self::MultiSpan {
+        // FIXME handle span
+        vec![]
+    }
+
+    fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+        // TODO
+        other.push(span)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::super::proc_macro::bridge::server::Literal;
+    use super::*;
+
+    #[test]
+    fn test_ra_server_literals() {
+        let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+        assert_eq!(srv.integer("1234").text, "1234");
+
+        assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+        assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+        assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+        assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+        assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+        assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+        assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+        assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+        assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+        assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+        assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+        assert_eq!(srv.float("0").text, "0.0");
+        assert_eq!(srv.float("15684.5867").text, "15684.5867");
+        assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+        assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+        assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+        assert_eq!(srv.character('c').text, "'c'");
+        assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+        // u128::max
+        assert_eq!(
+            srv.integer("340282366920938463463374607431768211455").text,
+            "340282366920938463463374607431768211455"
+        );
+        // i128::min
+        assert_eq!(
+            srv.integer("-170141183460469231731687303715884105728").text,
+            "-170141183460469231731687303715884105728"
+        );
+    }
+
+    #[test]
+    fn test_ra_server_to_string() {
+        let s = TokenStream {
+            token_trees: vec![
+                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                    text: "struct".into(),
+                    id: tt::TokenId::unspecified(),
+                })),
+                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                    text: "T".into(),
+                    id: tt::TokenId::unspecified(),
+                })),
+                tt::TokenTree::Subtree(tt::Subtree {
+                    delimiter: Some(tt::Delimiter {
+                        id: tt::TokenId::unspecified(),
+                        kind: tt::DelimiterKind::Brace,
+                    }),
+                    token_trees: vec![],
+                }),
+            ],
+        };
+
+        assert_eq!(s.to_string(), "struct T {}");
+    }
+
+    #[test]
+    fn test_ra_server_from_str() {
+        use std::str::FromStr;
+        let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+            delimiter: Some(tt::Delimiter {
+                id: tt::TokenId::unspecified(),
+                kind: tt::DelimiterKind::Parenthesis,
+            }),
+            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                text: "a".into(),
+                id: tt::TokenId::unspecified(),
+            }))],
+        });
+
+        let t1 = TokenStream::from_str("(a)").unwrap();
+        assert_eq!(t1.token_trees.len(), 1);
+        assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+        let t2 = TokenStream::from_str("(a);").unwrap();
+        assert_eq!(t2.token_trees.len(), 2);
+        assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+        let underscore = TokenStream::from_str("_").unwrap();
+        assert_eq!(
+            underscore.token_trees[0],
+            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                text: "_".into(),
+                id: tt::TokenId::unspecified(),
+            }))
+        );
+    }
+}
diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/rustc_server.rs b/crates/proc-macro-srv/src/abis/abi_1_63/rustc_server.rs
deleted file mode 100644 (file)
index 7fd422c..0000000
+++ /dev/null
@@ -1,834 +0,0 @@
-//! Rustc proc-macro server implementation with tt
-//!
-//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
-//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
-//! we could provide any TokenStream implementation.
-//! The original idea from fedochet is using proc-macro2 as backend,
-//! we use tt instead for better integration with RA.
-//!
-//! FIXME: No span and source file information is implemented yet
-
-use super::proc_macro::bridge::{self, server};
-
-use std::collections::HashMap;
-use std::hash::Hash;
-use std::iter::FromIterator;
-use std::ops::Bound;
-use std::{ascii, vec::IntoIter};
-
-type Group = tt::Subtree;
-type TokenTree = tt::TokenTree;
-type Punct = tt::Punct;
-type Spacing = tt::Spacing;
-type Literal = tt::Literal;
-type Span = tt::TokenId;
-
-#[derive(Debug, Default, Clone)]
-pub struct TokenStream {
-    pub token_trees: Vec<TokenTree>,
-}
-
-impl TokenStream {
-    pub fn new() -> Self {
-        TokenStream::default()
-    }
-
-    pub fn with_subtree(subtree: tt::Subtree) -> Self {
-        if subtree.delimiter.is_some() {
-            TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
-        } else {
-            TokenStream { token_trees: subtree.token_trees }
-        }
-    }
-
-    pub fn into_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: None, token_trees: self.token_trees }
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.token_trees.is_empty()
-    }
-}
-
-/// Creates a token stream containing a single token tree.
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream { token_trees: vec![tree] }
-    }
-}
-
-/// Collects a number of token trees into a single stream.
-impl FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
-        trees.into_iter().map(TokenStream::from).collect()
-    }
-}
-
-/// A "flattening" operation on token streams, collects token trees
-/// from multiple token streams into a single stream.
-impl FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let mut builder = TokenStreamBuilder::new();
-        streams.into_iter().for_each(|stream| builder.push(stream));
-        builder.build()
-    }
-}
-
-impl Extend<TokenTree> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
-        self.extend(trees.into_iter().map(TokenStream::from));
-    }
-}
-
-impl Extend<TokenStream> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-        for item in streams {
-            for tkn in item {
-                match tkn {
-                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
-                        self.token_trees.extend(subtree.token_trees);
-                    }
-                    _ => {
-                        self.token_trees.push(tkn);
-                    }
-                }
-            }
-        }
-    }
-}
-
-#[derive(Clone)]
-pub struct SourceFile {
-    // FIXME stub
-}
-
-type Level = super::proc_macro::Level;
-type LineColumn = super::proc_macro::LineColumn;
-
-/// A structure representing a diagnostic message and associated children
-/// messages.
-#[derive(Clone, Debug)]
-pub struct Diagnostic {
-    level: Level,
-    message: String,
-    spans: Vec<Span>,
-    children: Vec<Diagnostic>,
-}
-
-impl Diagnostic {
-    /// Creates a new diagnostic with the given `level` and `message`.
-    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
-        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
-    }
-}
-
-// Rustc Server Ident has to be `Copyable`
-// We use a stub here for bypassing
-#[derive(Hash, Eq, PartialEq, Copy, Clone)]
-pub struct IdentId(u32);
-
-#[derive(Clone, Hash, Eq, PartialEq)]
-struct IdentData(tt::Ident);
-
-#[derive(Default)]
-struct IdentInterner {
-    idents: HashMap<IdentData, u32>,
-    ident_data: Vec<IdentData>,
-}
-
-impl IdentInterner {
-    fn intern(&mut self, data: &IdentData) -> u32 {
-        if let Some(index) = self.idents.get(data) {
-            return *index;
-        }
-
-        let index = self.idents.len() as u32;
-        self.ident_data.push(data.clone());
-        self.idents.insert(data.clone(), index);
-        index
-    }
-
-    fn get(&self, index: u32) -> &IdentData {
-        &self.ident_data[index as usize]
-    }
-
-    #[allow(unused)]
-    fn get_mut(&mut self, index: u32) -> &mut IdentData {
-        self.ident_data.get_mut(index as usize).expect("Should be consistent")
-    }
-}
-
-pub struct TokenStreamBuilder {
-    acc: TokenStream,
-}
-
-/// Public implementation details for the `TokenStream` type, such as iterators.
-pub mod token_stream {
-    use std::str::FromStr;
-
-    use super::{TokenStream, TokenTree};
-
-    /// An iterator over `TokenStream`'s `TokenTree`s.
-    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
-    /// and returns whole groups as token trees.
-    impl IntoIterator for TokenStream {
-        type Item = TokenTree;
-        type IntoIter = super::IntoIter<TokenTree>;
-
-        fn into_iter(self) -> Self::IntoIter {
-            self.token_trees.into_iter()
-        }
-    }
-
-    type LexError = String;
-
-    /// Attempts to break the string into tokens and parse those tokens into a token stream.
-    /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
-    /// or characters not existing in the language.
-    /// All tokens in the parsed stream get `Span::call_site()` spans.
-    ///
-    /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
-    /// change these errors into `LexError`s later.
-    impl FromStr for TokenStream {
-        type Err = LexError;
-
-        fn from_str(src: &str) -> Result<TokenStream, LexError> {
-            let (subtree, _token_map) =
-                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
-
-            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
-            Ok(TokenStream::with_subtree(subtree))
-        }
-    }
-
-    impl ToString for TokenStream {
-        fn to_string(&self) -> String {
-            tt::pretty(&self.token_trees)
-        }
-    }
-
-    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
-        tt::Subtree {
-            delimiter: subtree
-                .delimiter
-                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
-            token_trees: subtree
-                .token_trees
-                .into_iter()
-                .map(token_tree_replace_token_ids_with_unspecified)
-                .collect(),
-        }
-    }
-
-    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
-        match tt {
-            tt::TokenTree::Leaf(leaf) => {
-                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
-            }
-            tt::TokenTree::Subtree(subtree) => {
-                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
-            }
-        }
-    }
-
-    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
-        match leaf {
-            tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
-            }
-            tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
-            }
-            tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
-            }
-        }
-    }
-}
-
-impl TokenStreamBuilder {
-    fn new() -> TokenStreamBuilder {
-        TokenStreamBuilder { acc: TokenStream::new() }
-    }
-
-    fn push(&mut self, stream: TokenStream) {
-        self.acc.extend(stream.into_iter())
-    }
-
-    fn build(self) -> TokenStream {
-        self.acc
-    }
-}
-
-pub struct FreeFunctions;
-
-#[derive(Clone)]
-pub struct TokenStreamIter {
-    trees: IntoIter<TokenTree>,
-}
-
-#[derive(Default)]
-pub struct Rustc {
-    ident_interner: IdentInterner,
-    // FIXME: store span information here.
-}
-
-impl server::Types for Rustc {
-    type FreeFunctions = FreeFunctions;
-    type TokenStream = TokenStream;
-    type Group = Group;
-    type Punct = Punct;
-    type Ident = IdentId;
-    type Literal = Literal;
-    type SourceFile = SourceFile;
-    type Diagnostic = Diagnostic;
-    type Span = Span;
-    type MultiSpan = Vec<Span>;
-}
-
-impl server::FreeFunctions for Rustc {
-    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
-        // FIXME: track env var accesses
-        // https://github.com/rust-lang/rust/pull/71858
-    }
-    fn track_path(&mut self, _path: &str) {}
-}
-
-impl server::TokenStream for Rustc {
-    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
-        stream.is_empty()
-    }
-    fn from_str(&mut self, src: &str) -> Self::TokenStream {
-        use std::str::FromStr;
-
-        Self::TokenStream::from_str(src).expect("cannot parse string")
-    }
-    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
-        stream.to_string()
-    }
-    fn from_token_tree(
-        &mut self,
-        tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
-    ) -> Self::TokenStream {
-        match tree {
-            bridge::TokenTree::Group(group) => {
-                let tree = TokenTree::from(group);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Ident(IdentId(index)) => {
-                let IdentData(ident) = self.ident_interner.get(index).clone();
-                let ident: tt::Ident = ident;
-                let leaf = tt::Leaf::from(ident);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Literal(literal) => {
-                let leaf = tt::Leaf::from(literal);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Punct(p) => {
-                let leaf = tt::Leaf::from(p);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-        }
-    }
-
-    fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
-        Ok(self_.clone())
-    }
-
-    fn concat_trees(
-        &mut self,
-        base: Option<Self::TokenStream>,
-        trees: Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
-    ) -> Self::TokenStream {
-        let mut builder = TokenStreamBuilder::new();
-        if let Some(base) = base {
-            builder.push(base);
-        }
-        for tree in trees {
-            builder.push(self.from_token_tree(tree));
-        }
-        builder.build()
-    }
-
-    fn concat_streams(
-        &mut self,
-        base: Option<Self::TokenStream>,
-        streams: Vec<Self::TokenStream>,
-    ) -> Self::TokenStream {
-        let mut builder = TokenStreamBuilder::new();
-        if let Some(base) = base {
-            builder.push(base);
-        }
-        for stream in streams {
-            builder.push(stream);
-        }
-        builder.build()
-    }
-
-    fn into_trees(
-        &mut self,
-        stream: Self::TokenStream,
-    ) -> Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
-        stream
-            .into_iter()
-            .map(|tree| match tree {
-                tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
-                    bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
-                }
-                tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
-                tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
-                tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(subtree),
-            })
-            .collect()
-    }
-}
-
-fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
-    let kind = match d {
-        bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
-        bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
-        bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
-        bridge::Delimiter::None => return None,
-    };
-    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
-}
-
-fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
-    match d.map(|it| it.kind) {
-        Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
-        Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
-        Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
-        None => bridge::Delimiter::None,
-    }
-}
-
-fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
-    match spacing {
-        bridge::Spacing::Alone => Spacing::Alone,
-        bridge::Spacing::Joint => Spacing::Joint,
-    }
-}
-
-fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
-    match spacing {
-        Spacing::Alone => bridge::Spacing::Alone,
-        Spacing::Joint => bridge::Spacing::Joint,
-    }
-}
-
-impl server::Group for Rustc {
-    fn new(
-        &mut self,
-        delimiter: bridge::Delimiter,
-        stream: Option<Self::TokenStream>,
-    ) -> Self::Group {
-        Self::Group {
-            delimiter: delim_to_internal(delimiter),
-            token_trees: stream.unwrap_or_default().token_trees,
-        }
-    }
-    fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
-        delim_to_external(group.delimiter)
-    }
-
-    // NOTE: Return value of do not include delimiter
-    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
-        TokenStream { token_trees: group.token_trees.clone() }
-    }
-
-    fn span(&mut self, group: &Self::Group) -> Self::Span {
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
-    }
-
-    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
-        if let Some(delim) = &mut group.delimiter {
-            delim.id = span;
-        }
-    }
-
-    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
-        // FIXME we only store one `TokenId` for the delimiters
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
-    }
-
-    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
-        // FIXME we only store one `TokenId` for the delimiters
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
-    }
-}
-
-impl server::Punct for Rustc {
-    fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
-        tt::Punct {
-            char: ch,
-            spacing: spacing_to_internal(spacing),
-            id: tt::TokenId::unspecified(),
-        }
-    }
-    fn as_char(&mut self, punct: Self::Punct) -> char {
-        punct.char
-    }
-    fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
-        spacing_to_external(punct.spacing)
-    }
-    fn span(&mut self, punct: Self::Punct) -> Self::Span {
-        punct.id
-    }
-    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
-        tt::Punct { id: span, ..punct }
-    }
-}
-
-impl server::Ident for Rustc {
-    fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
-        IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
-    }
-
-    fn span(&mut self, ident: Self::Ident) -> Self::Span {
-        self.ident_interner.get(ident.0).0.id
-    }
-    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
-        let data = self.ident_interner.get(ident.0);
-        let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
-        IdentId(self.ident_interner.intern(&new))
-    }
-}
-
-impl server::Literal for Rustc {
-    fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
-        // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
-        // They must still be present to be ABI-compatible and work with upstream proc_macro.
-        "".to_owned()
-    }
-    fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
-        Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
-    }
-    fn symbol(&mut self, literal: &Self::Literal) -> String {
-        literal.text.to_string()
-    }
-    fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
-        None
-    }
-
-    fn to_string(&mut self, literal: &Self::Literal) -> String {
-        literal.to_string()
-    }
-
-    fn integer(&mut self, n: &str) -> Self::Literal {
-        let n = match n.parse::<i128>() {
-            Ok(n) => n.to_string(),
-            Err(_) => n.parse::<u128>().unwrap().to_string(),
-        };
-        Literal { text: n.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
-        macro_rules! def_suffixed_integer {
-            ($kind:ident, $($ty:ty),*) => {
-                match $kind {
-                    $(
-                        stringify!($ty) => {
-                            let n: $ty = n.parse().unwrap();
-                            format!(concat!("{}", stringify!($ty)), n)
-                        }
-                    )*
-                    _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
-                }
-            }
-        }
-
-        let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
-
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn float(&mut self, n: &str) -> Self::Literal {
-        let n: f64 = n.parse().unwrap();
-        let mut text = f64::to_string(&n);
-        if !text.contains('.') {
-            text += ".0"
-        }
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn f32(&mut self, n: &str) -> Self::Literal {
-        let n: f32 = n.parse().unwrap();
-        let text = format!("{}f32", n);
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn f64(&mut self, n: &str) -> Self::Literal {
-        let n: f64 = n.parse().unwrap();
-        let text = format!("{}f64", n);
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn string(&mut self, string: &str) -> Self::Literal {
-        let mut escaped = String::new();
-        for ch in string.chars() {
-            escaped.extend(ch.escape_debug());
-        }
-        Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn character(&mut self, ch: char) -> Self::Literal {
-        Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
-        let string = bytes
-            .iter()
-            .cloned()
-            .flat_map(ascii::escape_default)
-            .map(Into::<char>::into)
-            .collect::<String>();
-
-        Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
-        literal.id
-    }
-
-    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
-        literal.id = span;
-    }
-
-    fn subspan(
-        &mut self,
-        _literal: &Self::Literal,
-        _start: Bound<usize>,
-        _end: Bound<usize>,
-    ) -> Option<Self::Span> {
-        // FIXME handle span
-        None
-    }
-}
-
-impl server::SourceFile for Rustc {
-    // FIXME these are all stubs
-    fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
-        true
-    }
-    fn path(&mut self, _file: &Self::SourceFile) -> String {
-        String::new()
-    }
-    fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
-        true
-    }
-}
-
-impl server::Diagnostic for Rustc {
-    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
-        let mut diag = Diagnostic::new(level, msg);
-        diag.spans = spans;
-        diag
-    }
-
-    fn sub(
-        &mut self,
-        _diag: &mut Self::Diagnostic,
-        _level: Level,
-        _msg: &str,
-        _spans: Self::MultiSpan,
-    ) {
-        // FIXME handle diagnostic
-        //
-    }
-
-    fn emit(&mut self, _diag: Self::Diagnostic) {
-        // FIXME handle diagnostic
-        // diag.emit()
-    }
-}
-
-impl server::Span for Rustc {
-    fn debug(&mut self, span: Self::Span) -> String {
-        format!("{:?}", span.0)
-    }
-    fn def_site(&mut self) -> Self::Span {
-        // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-    fn call_site(&mut self) -> Self::Span {
-        // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
-        SourceFile {}
-    }
-    fn save_span(&mut self, _span: Self::Span) -> usize {
-        // FIXME stub
-        0
-    }
-    fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
-        // FIXME stub
-        tt::TokenId::unspecified()
-    }
-    /// Recent feature, not yet in the proc_macro
-    ///
-    /// See PR:
-    /// https://github.com/rust-lang/rust/pull/55780
-    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
-        None
-    }
-
-    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
-        // FIXME handle span
-        None
-    }
-    fn source(&mut self, span: Self::Span) -> Self::Span {
-        // FIXME handle span
-        span
-    }
-    fn start(&mut self, _span: Self::Span) -> LineColumn {
-        // FIXME handle span
-        LineColumn { line: 0, column: 0 }
-    }
-    fn end(&mut self, _span: Self::Span) -> LineColumn {
-        // FIXME handle span
-        LineColumn { line: 0, column: 0 }
-    }
-    fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
-        // Just return the first span again, because some macros will unwrap the result.
-        Some(first)
-    }
-    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-
-    fn mixed_site(&mut self) -> Self::Span {
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-
-    fn after(&mut self, _self_: Self::Span) -> Self::Span {
-        tt::TokenId::unspecified()
-    }
-
-    fn before(&mut self, _self_: Self::Span) -> Self::Span {
-        tt::TokenId::unspecified()
-    }
-}
-
-impl server::MultiSpan for Rustc {
-    fn new(&mut self) -> Self::MultiSpan {
-        // FIXME handle span
-        vec![]
-    }
-
-    fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
-        //TODP
-        other.push(span)
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::super::proc_macro::bridge::server::Literal;
-    use super::*;
-
-    #[test]
-    fn test_rustc_server_literals() {
-        let mut srv = Rustc { ident_interner: IdentInterner::default() };
-        assert_eq!(srv.integer("1234").text, "1234");
-
-        assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
-        assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
-        assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
-        assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
-        assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
-        assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
-        assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
-        assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
-        assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
-        assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
-        assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
-        assert_eq!(srv.float("0").text, "0.0");
-        assert_eq!(srv.float("15684.5867").text, "15684.5867");
-        assert_eq!(srv.f32("15684.58").text, "15684.58f32");
-        assert_eq!(srv.f64("15684.58").text, "15684.58f64");
-
-        assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
-        assert_eq!(srv.character('c').text, "'c'");
-        assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
-
-        // u128::max
-        assert_eq!(
-            srv.integer("340282366920938463463374607431768211455").text,
-            "340282366920938463463374607431768211455"
-        );
-        // i128::min
-        assert_eq!(
-            srv.integer("-170141183460469231731687303715884105728").text,
-            "-170141183460469231731687303715884105728"
-        );
-    }
-
-    #[test]
-    fn test_rustc_server_to_string() {
-        let s = TokenStream {
-            token_trees: vec![
-                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                    text: "struct".into(),
-                    id: tt::TokenId::unspecified(),
-                })),
-                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                    text: "T".into(),
-                    id: tt::TokenId::unspecified(),
-                })),
-                tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: Some(tt::Delimiter {
-                        id: tt::TokenId::unspecified(),
-                        kind: tt::DelimiterKind::Brace,
-                    }),
-                    token_trees: vec![],
-                }),
-            ],
-        };
-
-        assert_eq!(s.to_string(), "struct T {}");
-    }
-
-    #[test]
-    fn test_rustc_server_from_str() {
-        use std::str::FromStr;
-        let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
-            delimiter: Some(tt::Delimiter {
-                id: tt::TokenId::unspecified(),
-                kind: tt::DelimiterKind::Parenthesis,
-            }),
-            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                text: "a".into(),
-                id: tt::TokenId::unspecified(),
-            }))],
-        });
-
-        let t1 = TokenStream::from_str("(a)").unwrap();
-        assert_eq!(t1.token_trees.len(), 1);
-        assert_eq!(t1.token_trees[0], subtree_paren_a);
-
-        let t2 = TokenStream::from_str("(a);").unwrap();
-        assert_eq!(t2.token_trees.len(), 2);
-        assert_eq!(t2.token_trees[0], subtree_paren_a);
-
-        let underscore = TokenStream::from_str("_").unwrap();
-        assert_eq!(
-            underscore.token_trees[0],
-            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                text: "_".into(),
-                id: tt::TokenId::unspecified(),
-            }))
-        );
-    }
-}
index 0ba4bbbef086ec7475cc9fa777a0df7e5c0e9d8a..9d56f0eaf888f74403e1051c3c026ab357bbff2d 100644 (file)
@@ -6,14 +6,14 @@
 
 #[allow(dead_code)]
 #[doc(hidden)]
-mod rustc_server;
+mod ra_server;
 
 use libloading::Library;
 use proc_macro_api::ProcMacroKind;
 
 use super::PanicMessage;
 
-pub use rustc_server::TokenStream;
+pub use ra_server::TokenStream;
 
 pub(crate) struct Abi {
     exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
@@ -50,7 +50,7 @@ pub fn expand(
                 } if *trait_name == macro_name => {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        rustc_server::Rustc::default(),
+                        ra_server::RustAnalyzer::default(),
                         parsed_body,
                         true,
                     );
@@ -61,7 +61,7 @@ pub fn expand(
                 {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        rustc_server::Rustc::default(),
+                        ra_server::RustAnalyzer::default(),
                         parsed_body,
                         true,
                     );
@@ -72,7 +72,7 @@ pub fn expand(
                 {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        rustc_server::Rustc::default(),
+                        ra_server::RustAnalyzer::default(),
                         parsed_attributes,
                         parsed_body,
                         true,
diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs
new file mode 100644 (file)
index 0000000..7e8e678
--- /dev/null
@@ -0,0 +1,792 @@
+//! Rustc proc-macro server implementation with tt
+//!
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
+//! we could provide any TokenStream implementation.
+//! The original idea from fedochet is using proc-macro2 as backend,
+//! we use tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+    pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+    pub fn new() -> Self {
+        TokenStream::default()
+    }
+
+    pub fn with_subtree(subtree: tt::Subtree) -> Self {
+        if subtree.delimiter.is_some() {
+            TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+        } else {
+            TokenStream { token_trees: subtree.token_trees }
+        }
+    }
+
+    pub fn into_subtree(self) -> tt::Subtree {
+        tt::Subtree { delimiter: None, token_trees: self.token_trees }
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.token_trees.is_empty()
+    }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+    fn from(tree: TokenTree) -> TokenStream {
+        TokenStream { token_trees: vec![tree] }
+    }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+        trees.into_iter().map(TokenStream::from).collect()
+    }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+        let mut builder = TokenStreamBuilder::new();
+        streams.into_iter().for_each(|stream| builder.push(stream));
+        builder.build()
+    }
+}
+
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+        self.extend(trees.into_iter().map(TokenStream::from));
+    }
+}
+
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+        for item in streams {
+            for tkn in item {
+                match tkn {
+                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+                        self.token_trees.extend(subtree.token_trees);
+                    }
+                    _ => {
+                        self.token_trees.push(tkn);
+                    }
+                }
+            }
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct SourceFile {
+    // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+    level: Level,
+    message: String,
+    spans: Vec<Span>,
+    children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+    /// Creates a new diagnostic with the given `level` and `message`.
+    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+    }
+}
+
+// The rustc proc-macro server requires its `Ident` type to be `Copy`.
+// `tt::Ident` is not, so we intern idents and hand out a copyable index stub instead.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+    idents: HashMap<IdentData, u32>,
+    ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+    fn intern(&mut self, data: &IdentData) -> u32 {
+        if let Some(index) = self.idents.get(data) {
+            return *index;
+        }
+
+        let index = self.idents.len() as u32;
+        self.ident_data.push(data.clone());
+        self.idents.insert(data.clone(), index);
+        index
+    }
+
+    fn get(&self, index: u32) -> &IdentData {
+        &self.ident_data[index as usize]
+    }
+
+    #[allow(unused)]
+    fn get_mut(&mut self, index: u32) -> &mut IdentData {
+        self.ident_data.get_mut(index as usize).expect("Should be consistent")
+    }
+}
+
+pub struct TokenStreamBuilder {
+    acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+    use std::str::FromStr;
+
+    use super::{TokenStream, TokenTree};
+
+    /// An iterator over `TokenStream`'s `TokenTree`s.
+    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+    /// and returns whole groups as token trees.
+    impl IntoIterator for TokenStream {
+        type Item = TokenTree;
+        type IntoIter = super::IntoIter<TokenTree>;
+
+        fn into_iter(self) -> Self::IntoIter {
+            self.token_trees.into_iter()
+        }
+    }
+
+    type LexError = String;
+
+    /// Attempts to break the string into tokens and parse those tokens into a token stream.
+    /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+    /// or characters not existing in the language.
+    /// All tokens in the parsed stream get `Span::call_site()` spans.
+    ///
+    /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+    /// change these errors into `LexError`s later.
+    impl FromStr for TokenStream {
+        type Err = LexError;
+
+        fn from_str(src: &str) -> Result<TokenStream, LexError> {
+            let (subtree, _token_map) =
+                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+            Ok(TokenStream::with_subtree(subtree))
+        }
+    }
+
+    impl ToString for TokenStream {
+        fn to_string(&self) -> String {
+            tt::pretty(&self.token_trees)
+        }
+    }
+
+    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+        tt::Subtree {
+            delimiter: subtree
+                .delimiter
+                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+            token_trees: subtree
+                .token_trees
+                .into_iter()
+                .map(token_tree_replace_token_ids_with_unspecified)
+                .collect(),
+        }
+    }
+
+    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+        match tt {
+            tt::TokenTree::Leaf(leaf) => {
+                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+            }
+            tt::TokenTree::Subtree(subtree) => {
+                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+            }
+        }
+    }
+
+    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+        match leaf {
+            tt::Leaf::Literal(lit) => {
+                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+            }
+            tt::Leaf::Punct(punct) => {
+                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+            }
+            tt::Leaf::Ident(ident) => {
+                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+            }
+        }
+    }
+}
+
+impl TokenStreamBuilder {
+    fn new() -> TokenStreamBuilder {
+        TokenStreamBuilder { acc: TokenStream::new() }
+    }
+
+    fn push(&mut self, stream: TokenStream) {
+        self.acc.extend(stream.into_iter())
+    }
+
+    fn build(self) -> TokenStream {
+        self.acc
+    }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+    trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+    ident_interner: IdentInterner,
+    // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+    type FreeFunctions = FreeFunctions;
+    type TokenStream = TokenStream;
+    type Ident = IdentId;
+    type Literal = Literal;
+    type SourceFile = SourceFile;
+    type Diagnostic = Diagnostic;
+    type Span = Span;
+    type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+        // FIXME: track env var accesses
+        // https://github.com/rust-lang/rust/pull/71858
+    }
+    fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+        stream.is_empty()
+    }
+    fn from_str(&mut self, src: &str) -> Self::TokenStream {
+        use std::str::FromStr;
+
+        Self::TokenStream::from_str(src).expect("cannot parse string")
+    }
+    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+        stream.to_string()
+    }
+    fn from_token_tree(
+        &mut self,
+        tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>,
+    ) -> Self::TokenStream {
+        match tree {
+            bridge::TokenTree::Group(group) => {
+                let group = Group {
+                    delimiter: delim_to_internal(group.delimiter),
+                    token_trees: match group.stream {
+                        Some(stream) => stream.into_iter().collect(),
+                        None => Vec::new(),
+                    },
+                };
+                let tree = TokenTree::from(group);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Ident(IdentId(index)) => {
+                let IdentData(ident) = self.ident_interner.get(index).clone();
+                let ident: tt::Ident = ident;
+                let leaf = tt::Leaf::from(ident);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Literal(literal) => {
+                let leaf = tt::Leaf::from(literal);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Punct(p) => {
+                let punct = tt::Punct {
+                    char: p.ch as char,
+                    spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
+                    id: p.span,
+                };
+                let leaf = tt::Leaf::from(punct);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+        }
+    }
+
+    fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+        Ok(self_.clone())
+    }
+
+    fn concat_trees(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>>,
+    ) -> Self::TokenStream {
+        let mut builder = TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for tree in trees {
+            builder.push(self.from_token_tree(tree));
+        }
+        builder.build()
+    }
+
+    fn concat_streams(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        streams: Vec<Self::TokenStream>,
+    ) -> Self::TokenStream {
+        let mut builder = TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for stream in streams {
+            builder.push(stream);
+        }
+        builder.build()
+    }
+
+    fn into_trees(
+        &mut self,
+        stream: Self::TokenStream,
+    ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>> {
+        stream
+            .into_iter()
+            .map(|tree| match tree {
+                tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+                    bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+                }
+                tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
+                tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+                    bridge::TokenTree::Punct(bridge::Punct {
+                        ch: punct.char as u8,
+                        joint: punct.spacing == Spacing::Joint,
+                        span: punct.id,
+                    })
+                }
+                tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
+                    delimiter: delim_to_external(subtree.delimiter),
+                    stream: if subtree.token_trees.is_empty() {
+                        None
+                    } else {
+                        Some(subtree.token_trees.into_iter().collect())
+                    },
+                    span: bridge::DelimSpan::from_single(
+                        subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
+                    ),
+                }),
+            })
+            .collect()
+    }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+    let kind = match d {
+        bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+        bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+        bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+        bridge::Delimiter::None => return None,
+    };
+    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+    match d.map(|it| it.kind) {
+        Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+        Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+        Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+        None => bridge::Delimiter::None,
+    }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+    match spacing {
+        bridge::Spacing::Alone => Spacing::Alone,
+        bridge::Spacing::Joint => Spacing::Joint,
+    }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+    match spacing {
+        Spacing::Alone => bridge::Spacing::Alone,
+        Spacing::Joint => bridge::Spacing::Joint,
+    }
+}
+
+impl server::Ident for RustAnalyzer {
+    fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+        IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+    }
+
+    fn span(&mut self, ident: Self::Ident) -> Self::Span {
+        self.ident_interner.get(ident.0).0.id
+    }
+    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+        let data = self.ident_interner.get(ident.0);
+        let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+        IdentId(self.ident_interner.intern(&new))
+    }
+}
+
+impl server::Literal for RustAnalyzer {
+    fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+        // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+        // They must still be present to be ABI-compatible and work with upstream proc_macro.
+        "".to_owned()
+    }
+    fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+        Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+    }
+    fn symbol(&mut self, literal: &Self::Literal) -> String {
+        literal.text.to_string()
+    }
+    fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+        None
+    }
+
+    fn to_string(&mut self, literal: &Self::Literal) -> String {
+        literal.to_string()
+    }
+
+    fn integer(&mut self, n: &str) -> Self::Literal {
+        let n = match n.parse::<i128>() {
+            Ok(n) => n.to_string(),
+            Err(_) => n.parse::<u128>().unwrap().to_string(),
+        };
+        Literal { text: n.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+        macro_rules! def_suffixed_integer {
+            ($kind:ident, $($ty:ty),*) => {
+                match $kind {
+                    $(
+                        stringify!($ty) => {
+                            let n: $ty = n.parse().unwrap();
+                            format!(concat!("{}", stringify!($ty)), n)
+                        }
+                    )*
+                    _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+                }
+            }
+        }
+
+        let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn float(&mut self, n: &str) -> Self::Literal {
+        let n: f64 = n.parse().unwrap();
+        let mut text = f64::to_string(&n);
+        if !text.contains('.') {
+            text += ".0"
+        }
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn f32(&mut self, n: &str) -> Self::Literal {
+        let n: f32 = n.parse().unwrap();
+        let text = format!("{}f32", n);
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn f64(&mut self, n: &str) -> Self::Literal {
+        let n: f64 = n.parse().unwrap();
+        let text = format!("{}f64", n);
+        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn string(&mut self, string: &str) -> Self::Literal {
+        let mut escaped = String::new();
+        for ch in string.chars() {
+            escaped.extend(ch.escape_debug());
+        }
+        Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn character(&mut self, ch: char) -> Self::Literal {
+        Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+        let string = bytes
+            .iter()
+            .cloned()
+            .flat_map(ascii::escape_default)
+            .map(Into::<char>::into)
+            .collect::<String>();
+
+        Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+    }
+
+    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+        literal.id
+    }
+
+    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+        literal.id = span;
+    }
+
+    fn subspan(
+        &mut self,
+        _literal: &Self::Literal,
+        _start: Bound<usize>,
+        _end: Bound<usize>,
+    ) -> Option<Self::Span> {
+        // FIXME handle span
+        None
+    }
+}
+
+impl server::SourceFile for RustAnalyzer {
+    // FIXME these are all stubs
+    fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+        true
+    }
+    fn path(&mut self, _file: &Self::SourceFile) -> String {
+        String::new()
+    }
+    fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+        true
+    }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+        let mut diag = Diagnostic::new(level, msg);
+        diag.spans = spans;
+        diag
+    }
+
+    fn sub(
+        &mut self,
+        _diag: &mut Self::Diagnostic,
+        _level: Level,
+        _msg: &str,
+        _spans: Self::MultiSpan,
+    ) {
+        // FIXME handle diagnostic
+        //
+    }
+
+    fn emit(&mut self, _diag: Self::Diagnostic) {
+        // FIXME handle diagnostic
+        // diag.emit()
+    }
+}
+
+impl server::Span for RustAnalyzer {
+    fn debug(&mut self, span: Self::Span) -> String {
+        format!("{:?}", span.0)
+    }
+    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+        SourceFile {}
+    }
+    fn save_span(&mut self, _span: Self::Span) -> usize {
+        // FIXME stub
+        0
+    }
+    fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+        // FIXME stub
+        tt::TokenId::unspecified()
+    }
+    /// Recent feature, not yet in the proc_macro
+    ///
+    /// See PR:
+    /// https://github.com/rust-lang/rust/pull/55780
+    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+        None
+    }
+
+    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+        // FIXME handle span
+        None
+    }
+    fn source(&mut self, span: Self::Span) -> Self::Span {
+        // FIXME handle span
+        span
+    }
+    fn start(&mut self, _span: Self::Span) -> LineColumn {
+        // FIXME handle span
+        LineColumn { line: 0, column: 0 }
+    }
+    fn end(&mut self, _span: Self::Span) -> LineColumn {
+        // FIXME handle span
+        LineColumn { line: 0, column: 0 }
+    }
+    fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+        // Just return the first span again, because some macros will unwrap the result.
+        Some(first)
+    }
+    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+        // FIXME handle span
+        tt::TokenId::unspecified()
+    }
+
+    fn after(&mut self, _self_: Self::Span) -> Self::Span {
+        tt::TokenId::unspecified()
+    }
+
+    fn before(&mut self, _self_: Self::Span) -> Self::Span {
+        tt::TokenId::unspecified()
+    }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+    fn new(&mut self) -> Self::MultiSpan {
+        // FIXME handle span
+        vec![]
+    }
+
+    fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+        // TODO: handle span
+        other.push(span)
+    }
+}
+
+impl server::Server for RustAnalyzer {
+    fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
+        bridge::ExpnGlobals {
+            def_site: Span::unspecified(),
+            call_site: Span::unspecified(),
+            mixed_site: Span::unspecified(),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::super::proc_macro::bridge::server::Literal;
+    use super::*;
+
+    #[test]
+    fn test_ra_server_literals() {
+        let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+        assert_eq!(srv.integer("1234").text, "1234");
+
+        assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+        assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+        assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+        assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+        assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+        assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+        assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+        assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+        assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+        assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+        assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+        assert_eq!(srv.float("0").text, "0.0");
+        assert_eq!(srv.float("15684.5867").text, "15684.5867");
+        assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+        assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+        assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+        assert_eq!(srv.character('c').text, "'c'");
+        assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+        // u128::max
+        assert_eq!(
+            srv.integer("340282366920938463463374607431768211455").text,
+            "340282366920938463463374607431768211455"
+        );
+        // i128::min
+        assert_eq!(
+            srv.integer("-170141183460469231731687303715884105728").text,
+            "-170141183460469231731687303715884105728"
+        );
+    }
+
+    #[test]
+    fn test_ra_server_to_string() {
+        let s = TokenStream {
+            token_trees: vec![
+                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                    text: "struct".into(),
+                    id: tt::TokenId::unspecified(),
+                })),
+                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                    text: "T".into(),
+                    id: tt::TokenId::unspecified(),
+                })),
+                tt::TokenTree::Subtree(tt::Subtree {
+                    delimiter: Some(tt::Delimiter {
+                        id: tt::TokenId::unspecified(),
+                        kind: tt::DelimiterKind::Brace,
+                    }),
+                    token_trees: vec![],
+                }),
+            ],
+        };
+
+        assert_eq!(s.to_string(), "struct T {}");
+    }
+
+    #[test]
+    fn test_ra_server_from_str() {
+        use std::str::FromStr;
+        let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+            delimiter: Some(tt::Delimiter {
+                id: tt::TokenId::unspecified(),
+                kind: tt::DelimiterKind::Parenthesis,
+            }),
+            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                text: "a".into(),
+                id: tt::TokenId::unspecified(),
+            }))],
+        });
+
+        let t1 = TokenStream::from_str("(a)").unwrap();
+        assert_eq!(t1.token_trees.len(), 1);
+        assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+        let t2 = TokenStream::from_str("(a);").unwrap();
+        assert_eq!(t2.token_trees.len(), 2);
+        assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+        let underscore = TokenStream::from_str("_").unwrap();
+        assert_eq!(
+            underscore.token_trees[0],
+            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                text: "_".into(),
+                id: tt::TokenId::unspecified(),
+            }))
+        );
+    }
+}
diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/rustc_server.rs b/crates/proc-macro-srv/src/abis/abi_1_64/rustc_server.rs
deleted file mode 100644 (file)
index 757454a..0000000
+++ /dev/null
@@ -1,792 +0,0 @@
-//! Rustc proc-macro server implementation with tt
-//!
-//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
-//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
-//! we could provide any TokenStream implementation.
-//! The original idea from fedochet is using proc-macro2 as backend,
-//! we use tt instead for better integration with RA.
-//!
-//! FIXME: No span and source file information is implemented yet
-
-use super::proc_macro::bridge::{self, server};
-
-use std::collections::HashMap;
-use std::hash::Hash;
-use std::iter::FromIterator;
-use std::ops::Bound;
-use std::{ascii, vec::IntoIter};
-
-type Group = tt::Subtree;
-type TokenTree = tt::TokenTree;
-type Punct = tt::Punct;
-type Spacing = tt::Spacing;
-type Literal = tt::Literal;
-type Span = tt::TokenId;
-
-#[derive(Debug, Default, Clone)]
-pub struct TokenStream {
-    pub token_trees: Vec<TokenTree>,
-}
-
-impl TokenStream {
-    pub fn new() -> Self {
-        TokenStream::default()
-    }
-
-    pub fn with_subtree(subtree: tt::Subtree) -> Self {
-        if subtree.delimiter.is_some() {
-            TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
-        } else {
-            TokenStream { token_trees: subtree.token_trees }
-        }
-    }
-
-    pub fn into_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: None, token_trees: self.token_trees }
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.token_trees.is_empty()
-    }
-}
-
-/// Creates a token stream containing a single token tree.
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream { token_trees: vec![tree] }
-    }
-}
-
-/// Collects a number of token trees into a single stream.
-impl FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
-        trees.into_iter().map(TokenStream::from).collect()
-    }
-}
-
-/// A "flattening" operation on token streams, collects token trees
-/// from multiple token streams into a single stream.
-impl FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let mut builder = TokenStreamBuilder::new();
-        streams.into_iter().for_each(|stream| builder.push(stream));
-        builder.build()
-    }
-}
-
-impl Extend<TokenTree> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
-        self.extend(trees.into_iter().map(TokenStream::from));
-    }
-}
-
-impl Extend<TokenStream> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-        for item in streams {
-            for tkn in item {
-                match tkn {
-                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
-                        self.token_trees.extend(subtree.token_trees);
-                    }
-                    _ => {
-                        self.token_trees.push(tkn);
-                    }
-                }
-            }
-        }
-    }
-}
-
-#[derive(Clone)]
-pub struct SourceFile {
-    // FIXME stub
-}
-
-type Level = super::proc_macro::Level;
-type LineColumn = super::proc_macro::LineColumn;
-
-/// A structure representing a diagnostic message and associated children
-/// messages.
-#[derive(Clone, Debug)]
-pub struct Diagnostic {
-    level: Level,
-    message: String,
-    spans: Vec<Span>,
-    children: Vec<Diagnostic>,
-}
-
-impl Diagnostic {
-    /// Creates a new diagnostic with the given `level` and `message`.
-    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
-        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
-    }
-}
-
-// Rustc Server Ident has to be `Copyable`
-// We use a stub here for bypassing
-#[derive(Hash, Eq, PartialEq, Copy, Clone)]
-pub struct IdentId(u32);
-
-#[derive(Clone, Hash, Eq, PartialEq)]
-struct IdentData(tt::Ident);
-
-#[derive(Default)]
-struct IdentInterner {
-    idents: HashMap<IdentData, u32>,
-    ident_data: Vec<IdentData>,
-}
-
-impl IdentInterner {
-    fn intern(&mut self, data: &IdentData) -> u32 {
-        if let Some(index) = self.idents.get(data) {
-            return *index;
-        }
-
-        let index = self.idents.len() as u32;
-        self.ident_data.push(data.clone());
-        self.idents.insert(data.clone(), index);
-        index
-    }
-
-    fn get(&self, index: u32) -> &IdentData {
-        &self.ident_data[index as usize]
-    }
-
-    #[allow(unused)]
-    fn get_mut(&mut self, index: u32) -> &mut IdentData {
-        self.ident_data.get_mut(index as usize).expect("Should be consistent")
-    }
-}
-
-pub struct TokenStreamBuilder {
-    acc: TokenStream,
-}
-
-/// Public implementation details for the `TokenStream` type, such as iterators.
-pub mod token_stream {
-    use std::str::FromStr;
-
-    use super::{TokenStream, TokenTree};
-
-    /// An iterator over `TokenStream`'s `TokenTree`s.
-    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
-    /// and returns whole groups as token trees.
-    impl IntoIterator for TokenStream {
-        type Item = TokenTree;
-        type IntoIter = super::IntoIter<TokenTree>;
-
-        fn into_iter(self) -> Self::IntoIter {
-            self.token_trees.into_iter()
-        }
-    }
-
-    type LexError = String;
-
-    /// Attempts to break the string into tokens and parse those tokens into a token stream.
-    /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
-    /// or characters not existing in the language.
-    /// All tokens in the parsed stream get `Span::call_site()` spans.
-    ///
-    /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
-    /// change these errors into `LexError`s later.
-    impl FromStr for TokenStream {
-        type Err = LexError;
-
-        fn from_str(src: &str) -> Result<TokenStream, LexError> {
-            let (subtree, _token_map) =
-                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
-
-            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
-            Ok(TokenStream::with_subtree(subtree))
-        }
-    }
-
-    impl ToString for TokenStream {
-        fn to_string(&self) -> String {
-            tt::pretty(&self.token_trees)
-        }
-    }
-
-    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
-        tt::Subtree {
-            delimiter: subtree
-                .delimiter
-                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
-            token_trees: subtree
-                .token_trees
-                .into_iter()
-                .map(token_tree_replace_token_ids_with_unspecified)
-                .collect(),
-        }
-    }
-
-    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
-        match tt {
-            tt::TokenTree::Leaf(leaf) => {
-                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
-            }
-            tt::TokenTree::Subtree(subtree) => {
-                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
-            }
-        }
-    }
-
-    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
-        match leaf {
-            tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
-            }
-            tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
-            }
-            tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
-            }
-        }
-    }
-}
-
-impl TokenStreamBuilder {
-    fn new() -> TokenStreamBuilder {
-        TokenStreamBuilder { acc: TokenStream::new() }
-    }
-
-    fn push(&mut self, stream: TokenStream) {
-        self.acc.extend(stream.into_iter())
-    }
-
-    fn build(self) -> TokenStream {
-        self.acc
-    }
-}
-
-pub struct FreeFunctions;
-
-#[derive(Clone)]
-pub struct TokenStreamIter {
-    trees: IntoIter<TokenTree>,
-}
-
-#[derive(Default)]
-pub struct Rustc {
-    ident_interner: IdentInterner,
-    // FIXME: store span information here.
-}
-
-impl server::Types for Rustc {
-    type FreeFunctions = FreeFunctions;
-    type TokenStream = TokenStream;
-    type Ident = IdentId;
-    type Literal = Literal;
-    type SourceFile = SourceFile;
-    type Diagnostic = Diagnostic;
-    type Span = Span;
-    type MultiSpan = Vec<Span>;
-}
-
-impl server::FreeFunctions for Rustc {
-    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
-        // FIXME: track env var accesses
-        // https://github.com/rust-lang/rust/pull/71858
-    }
-    fn track_path(&mut self, _path: &str) {}
-}
-
-impl server::TokenStream for Rustc {
-    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
-        stream.is_empty()
-    }
-    fn from_str(&mut self, src: &str) -> Self::TokenStream {
-        use std::str::FromStr;
-
-        Self::TokenStream::from_str(src).expect("cannot parse string")
-    }
-    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
-        stream.to_string()
-    }
-    fn from_token_tree(
-        &mut self,
-        tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>,
-    ) -> Self::TokenStream {
-        match tree {
-            bridge::TokenTree::Group(group) => {
-                let group = Group {
-                    delimiter: delim_to_internal(group.delimiter),
-                    token_trees: match group.stream {
-                        Some(stream) => stream.into_iter().collect(),
-                        None => Vec::new(),
-                    },
-                };
-                let tree = TokenTree::from(group);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Ident(IdentId(index)) => {
-                let IdentData(ident) = self.ident_interner.get(index).clone();
-                let ident: tt::Ident = ident;
-                let leaf = tt::Leaf::from(ident);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Literal(literal) => {
-                let leaf = tt::Leaf::from(literal);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Punct(p) => {
-                let punct = tt::Punct {
-                    char: p.ch as char,
-                    spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
-                    id: p.span,
-                };
-                let leaf = tt::Leaf::from(punct);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-        }
-    }
-
-    fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
-        Ok(self_.clone())
-    }
-
-    fn concat_trees(
-        &mut self,
-        base: Option<Self::TokenStream>,
-        trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>>,
-    ) -> Self::TokenStream {
-        let mut builder = TokenStreamBuilder::new();
-        if let Some(base) = base {
-            builder.push(base);
-        }
-        for tree in trees {
-            builder.push(self.from_token_tree(tree));
-        }
-        builder.build()
-    }
-
-    fn concat_streams(
-        &mut self,
-        base: Option<Self::TokenStream>,
-        streams: Vec<Self::TokenStream>,
-    ) -> Self::TokenStream {
-        let mut builder = TokenStreamBuilder::new();
-        if let Some(base) = base {
-            builder.push(base);
-        }
-        for stream in streams {
-            builder.push(stream);
-        }
-        builder.build()
-    }
-
-    fn into_trees(
-        &mut self,
-        stream: Self::TokenStream,
-    ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>> {
-        stream
-            .into_iter()
-            .map(|tree| match tree {
-                tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
-                    bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
-                }
-                tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
-                tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
-                    bridge::TokenTree::Punct(bridge::Punct {
-                        ch: punct.char as u8,
-                        joint: punct.spacing == Spacing::Joint,
-                        span: punct.id,
-                    })
-                }
-                tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
-                    delimiter: delim_to_external(subtree.delimiter),
-                    stream: if subtree.token_trees.is_empty() {
-                        None
-                    } else {
-                        Some(subtree.token_trees.into_iter().collect())
-                    },
-                    span: bridge::DelimSpan::from_single(
-                        subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
-                    ),
-                }),
-            })
-            .collect()
-    }
-}
-
-fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
-    let kind = match d {
-        bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
-        bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
-        bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
-        bridge::Delimiter::None => return None,
-    };
-    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
-}
-
-fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
-    match d.map(|it| it.kind) {
-        Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
-        Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
-        Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
-        None => bridge::Delimiter::None,
-    }
-}
-
-fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
-    match spacing {
-        bridge::Spacing::Alone => Spacing::Alone,
-        bridge::Spacing::Joint => Spacing::Joint,
-    }
-}
-
-fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
-    match spacing {
-        Spacing::Alone => bridge::Spacing::Alone,
-        Spacing::Joint => bridge::Spacing::Joint,
-    }
-}
-
-impl server::Ident for Rustc {
-    fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
-        IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
-    }
-
-    fn span(&mut self, ident: Self::Ident) -> Self::Span {
-        self.ident_interner.get(ident.0).0.id
-    }
-    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
-        let data = self.ident_interner.get(ident.0);
-        let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
-        IdentId(self.ident_interner.intern(&new))
-    }
-}
-
-impl server::Literal for Rustc {
-    fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
-        // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
-        // They must still be present to be ABI-compatible and work with upstream proc_macro.
-        "".to_owned()
-    }
-    fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
-        Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
-    }
-    fn symbol(&mut self, literal: &Self::Literal) -> String {
-        literal.text.to_string()
-    }
-    fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
-        None
-    }
-
-    fn to_string(&mut self, literal: &Self::Literal) -> String {
-        literal.to_string()
-    }
-
-    fn integer(&mut self, n: &str) -> Self::Literal {
-        let n = match n.parse::<i128>() {
-            Ok(n) => n.to_string(),
-            Err(_) => n.parse::<u128>().unwrap().to_string(),
-        };
-        Literal { text: n.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
-        macro_rules! def_suffixed_integer {
-            ($kind:ident, $($ty:ty),*) => {
-                match $kind {
-                    $(
-                        stringify!($ty) => {
-                            let n: $ty = n.parse().unwrap();
-                            format!(concat!("{}", stringify!($ty)), n)
-                        }
-                    )*
-                    _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
-                }
-            }
-        }
-
-        let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
-
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn float(&mut self, n: &str) -> Self::Literal {
-        let n: f64 = n.parse().unwrap();
-        let mut text = f64::to_string(&n);
-        if !text.contains('.') {
-            text += ".0"
-        }
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn f32(&mut self, n: &str) -> Self::Literal {
-        let n: f32 = n.parse().unwrap();
-        let text = format!("{}f32", n);
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn f64(&mut self, n: &str) -> Self::Literal {
-        let n: f64 = n.parse().unwrap();
-        let text = format!("{}f64", n);
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn string(&mut self, string: &str) -> Self::Literal {
-        let mut escaped = String::new();
-        for ch in string.chars() {
-            escaped.extend(ch.escape_debug());
-        }
-        Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn character(&mut self, ch: char) -> Self::Literal {
-        Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
-        let string = bytes
-            .iter()
-            .cloned()
-            .flat_map(ascii::escape_default)
-            .map(Into::<char>::into)
-            .collect::<String>();
-
-        Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
-        literal.id
-    }
-
-    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
-        literal.id = span;
-    }
-
-    fn subspan(
-        &mut self,
-        _literal: &Self::Literal,
-        _start: Bound<usize>,
-        _end: Bound<usize>,
-    ) -> Option<Self::Span> {
-        // FIXME handle span
-        None
-    }
-}
-
-impl server::SourceFile for Rustc {
-    // FIXME these are all stubs
-    fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
-        true
-    }
-    fn path(&mut self, _file: &Self::SourceFile) -> String {
-        String::new()
-    }
-    fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
-        true
-    }
-}
-
-impl server::Diagnostic for Rustc {
-    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
-        let mut diag = Diagnostic::new(level, msg);
-        diag.spans = spans;
-        diag
-    }
-
-    fn sub(
-        &mut self,
-        _diag: &mut Self::Diagnostic,
-        _level: Level,
-        _msg: &str,
-        _spans: Self::MultiSpan,
-    ) {
-        // FIXME handle diagnostic
-        //
-    }
-
-    fn emit(&mut self, _diag: Self::Diagnostic) {
-        // FIXME handle diagnostic
-        // diag.emit()
-    }
-}
-
-impl server::Span for Rustc {
-    fn debug(&mut self, span: Self::Span) -> String {
-        format!("{:?}", span.0)
-    }
-    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
-        SourceFile {}
-    }
-    fn save_span(&mut self, _span: Self::Span) -> usize {
-        // FIXME stub
-        0
-    }
-    fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
-        // FIXME stub
-        tt::TokenId::unspecified()
-    }
-    /// Recent feature, not yet in the proc_macro
-    ///
-    /// See PR:
-    /// https://github.com/rust-lang/rust/pull/55780
-    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
-        None
-    }
-
-    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
-        // FIXME handle span
-        None
-    }
-    fn source(&mut self, span: Self::Span) -> Self::Span {
-        // FIXME handle span
-        span
-    }
-    fn start(&mut self, _span: Self::Span) -> LineColumn {
-        // FIXME handle span
-        LineColumn { line: 0, column: 0 }
-    }
-    fn end(&mut self, _span: Self::Span) -> LineColumn {
-        // FIXME handle span
-        LineColumn { line: 0, column: 0 }
-    }
-    fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
-        // Just return the first span again, because some macros will unwrap the result.
-        Some(first)
-    }
-    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-
-    fn after(&mut self, _self_: Self::Span) -> Self::Span {
-        tt::TokenId::unspecified()
-    }
-
-    fn before(&mut self, _self_: Self::Span) -> Self::Span {
-        tt::TokenId::unspecified()
-    }
-}
-
-impl server::MultiSpan for Rustc {
-    fn new(&mut self) -> Self::MultiSpan {
-        // FIXME handle span
-        vec![]
-    }
-
-    fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
-        //TODP
-        other.push(span)
-    }
-}
-
-impl server::Server for Rustc {
-    fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
-        bridge::ExpnGlobals {
-            def_site: Span::unspecified(),
-            call_site: Span::unspecified(),
-            mixed_site: Span::unspecified(),
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::super::proc_macro::bridge::server::Literal;
-    use super::*;
-
-    #[test]
-    fn test_rustc_server_literals() {
-        let mut srv = Rustc { ident_interner: IdentInterner::default() };
-        assert_eq!(srv.integer("1234").text, "1234");
-
-        assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
-        assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
-        assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
-        assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
-        assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
-        assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
-        assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
-        assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
-        assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
-        assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
-        assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
-        assert_eq!(srv.float("0").text, "0.0");
-        assert_eq!(srv.float("15684.5867").text, "15684.5867");
-        assert_eq!(srv.f32("15684.58").text, "15684.58f32");
-        assert_eq!(srv.f64("15684.58").text, "15684.58f64");
-
-        assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
-        assert_eq!(srv.character('c').text, "'c'");
-        assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
-
-        // u128::max
-        assert_eq!(
-            srv.integer("340282366920938463463374607431768211455").text,
-            "340282366920938463463374607431768211455"
-        );
-        // i128::min
-        assert_eq!(
-            srv.integer("-170141183460469231731687303715884105728").text,
-            "-170141183460469231731687303715884105728"
-        );
-    }
-
-    #[test]
-    fn test_rustc_server_to_string() {
-        let s = TokenStream {
-            token_trees: vec![
-                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                    text: "struct".into(),
-                    id: tt::TokenId::unspecified(),
-                })),
-                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                    text: "T".into(),
-                    id: tt::TokenId::unspecified(),
-                })),
-                tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: Some(tt::Delimiter {
-                        id: tt::TokenId::unspecified(),
-                        kind: tt::DelimiterKind::Brace,
-                    }),
-                    token_trees: vec![],
-                }),
-            ],
-        };
-
-        assert_eq!(s.to_string(), "struct T {}");
-    }
-
-    #[test]
-    fn test_rustc_server_from_str() {
-        use std::str::FromStr;
-        let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
-            delimiter: Some(tt::Delimiter {
-                id: tt::TokenId::unspecified(),
-                kind: tt::DelimiterKind::Parenthesis,
-            }),
-            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                text: "a".into(),
-                id: tt::TokenId::unspecified(),
-            }))],
-        });
-
-        let t1 = TokenStream::from_str("(a)").unwrap();
-        assert_eq!(t1.token_trees.len(), 1);
-        assert_eq!(t1.token_trees[0], subtree_paren_a);
-
-        let t2 = TokenStream::from_str("(a);").unwrap();
-        assert_eq!(t2.token_trees.len(), 2);
-        assert_eq!(t2.token_trees[0], subtree_paren_a);
-
-        let underscore = TokenStream::from_str("_").unwrap();
-        assert_eq!(
-            underscore.token_trees[0],
-            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                text: "_".into(),
-                id: tt::TokenId::unspecified(),
-            }))
-        );
-    }
-}