]> git.lizzy.rs Git - rust.git/blobdiff - crates/proc_macro_srv/src/rustc_server.rs
check rustc major version == 1 not < 1
[rust.git] / crates / proc_macro_srv / src / rustc_server.rs
index 7d1695c86a8f33037f701eb08e6062e04e0db405..e252e89a5697c26a70c6ccca8a3fdab45847e186 100644 (file)
@@ -1,19 +1,19 @@
 //! Rustc proc-macro server implementation with tt
 //!
-//! Based on idea from https://github.com/fedochet/rust-proc-macro-expander
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
 //! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
 //! we could provide any TokenStream implementation.
 //! The original idea from fedochet is using proc-macro2 as backend,
-//! we use tt instead for better intergation with RA.
+//! we use tt instead for better integration with RA.
 //!
 //! FIXME: No span and source file information is implemented yet
 
 use crate::proc_macro::bridge::{self, server};
 
-use std::collections::{Bound, HashMap};
+use std::collections::HashMap;
 use std::hash::Hash;
 use std::iter::FromIterator;
-use std::str::FromStr;
+use std::ops::Bound;
 use std::{ascii, vec::IntoIter};
 
 type Group = tt::Subtree;
 
 #[derive(Debug, Clone)]
 pub struct TokenStream {
-    pub subtree: tt::Subtree,
+    pub token_trees: Vec<TokenTree>,
 }
 
 impl TokenStream {
     pub fn new() -> Self {
-        TokenStream { subtree: Default::default() }
+        TokenStream { token_trees: Default::default() }
     }
 
     pub fn with_subtree(subtree: tt::Subtree) -> Self {
-        TokenStream { subtree }
+        if subtree.delimiter.is_some() {
+            TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+        } else {
+            TokenStream { token_trees: subtree.token_trees }
+        }
+    }
+
+    pub fn into_subtree(self) -> tt::Subtree {
+        tt::Subtree { delimiter: None, token_trees: self.token_trees }
     }
 
     pub fn is_empty(&self) -> bool {
-        self.subtree.token_trees.is_empty()
+        self.token_trees.is_empty()
     }
 }
 
 /// Creates a token stream containing a single token tree.
 impl From<TokenTree> for TokenStream {
     fn from(tree: TokenTree) -> TokenStream {
-        TokenStream { subtree: tt::Subtree { delimiter: None, token_trees: vec![tree] } }
+        TokenStream { token_trees: vec![tree] }
     }
 }
 
@@ -78,10 +86,10 @@ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
             for tkn in item {
                 match tkn {
                     tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
-                        self.subtree.token_trees.extend(subtree.token_trees);
+                        self.token_trees.extend(subtree.token_trees);
                     }
                     _ => {
-                        self.subtree.token_trees.push(tkn);
+                        self.token_trees.push(tkn);
                     }
                 }
             }
@@ -164,7 +172,7 @@ impl IntoIterator for TokenStream {
         type IntoIter = super::IntoIter<TokenTree>;
 
         fn into_iter(self) -> Self::IntoIter {
-            self.subtree.token_trees.into_iter()
+            self.token_trees.into_iter()
         }
     }
 
@@ -184,45 +192,88 @@ fn from_str(src: &str) -> Result<TokenStream, LexError> {
             let (subtree, _token_map) =
                 mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
 
-            let tt: tt::TokenTree = subtree.into();
-            Ok(tt.into())
+            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+            Ok(TokenStream::with_subtree(subtree))
         }
     }
 
     impl ToString for TokenStream {
         fn to_string(&self) -> String {
-            let tt = self.subtree.clone().into();
-            to_text(&tt)
-        }
-    }
-
-    fn to_text(tkn: &tt::TokenTree) -> String {
-        match tkn {
-            tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident.text.clone().into(),
-            tt::TokenTree::Leaf(tt::Leaf::Literal(literal)) => literal.text.clone().into(),
-            tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => format!("{}", punct.char),
-            tt::TokenTree::Subtree(subtree) => {
-                let content = subtree
-                    .token_trees
-                    .iter()
-                    .map(|tkn| {
-                        let s = to_text(tkn);
+            return tokentrees_to_text(&self.token_trees[..]);
+
+            fn tokentrees_to_text(tkns: &[tt::TokenTree]) -> String {
+                tkns.iter()
+                    .fold((String::new(), true), |(last, last_to_joint), tkn| {
+                        let s = [last, tokentree_to_text(tkn)].join(if last_to_joint {
+                            ""
+                        } else {
+                            " "
+                        });
+                        let mut is_joint = false;
                         if let tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) = tkn {
-                            if punct.spacing == tt::Spacing::Alone {
-                                return s + " ";
+                            if punct.spacing == tt::Spacing::Joint {
+                                is_joint = true;
                             }
                         }
-                        s
+                        (s, is_joint)
                     })
-                    .collect::<Vec<_>>()
-                    .concat();
-                let (open, close) = match subtree.delimiter.map(|it| it.kind) {
-                    None => ("", ""),
-                    Some(tt::DelimiterKind::Brace) => ("{", "}"),
-                    Some(tt::DelimiterKind::Parenthesis) => ("(", ")"),
-                    Some(tt::DelimiterKind::Bracket) => ("[", "]"),
-                };
-                format!("{}{}{}", open, content, close)
+                    .0
+            }
+
+            fn tokentree_to_text(tkn: &tt::TokenTree) -> String {
+                match tkn {
+                    tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident.text.clone().into(),
+                    tt::TokenTree::Leaf(tt::Leaf::Literal(literal)) => literal.text.clone().into(),
+                    tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => format!("{}", punct.char),
+                    tt::TokenTree::Subtree(subtree) => {
+                        let content = tokentrees_to_text(&subtree.token_trees);
+                        let (open, close) = match subtree.delimiter.map(|it| it.kind) {
+                            None => ("", ""),
+                            Some(tt::DelimiterKind::Brace) => ("{", "}"),
+                            Some(tt::DelimiterKind::Parenthesis) => ("(", ")"),
+                            Some(tt::DelimiterKind::Bracket) => ("[", "]"),
+                        };
+                        format!("{}{}{}", open, content, close)
+                    }
+                }
+            }
+        }
+    }
+
+    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+        tt::Subtree {
+            delimiter: subtree
+                .delimiter
+                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+            token_trees: subtree
+                .token_trees
+                .into_iter()
+                .map(token_tree_replace_token_ids_with_unspecified)
+                .collect(),
+        }
+    }
+
+    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+        match tt {
+            tt::TokenTree::Leaf(leaf) => {
+                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+            }
+            tt::TokenTree::Subtree(subtree) => {
+                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+            }
+        }
+    }
+
+    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+        match leaf {
+            tt::Leaf::Literal(lit) => {
+                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+            }
+            tt::Leaf::Punct(punct) => {
+                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+            }
+            tt::Leaf::Ident(ident) => {
+                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
             }
         }
     }
@@ -242,6 +293,8 @@ fn build(self) -> TokenStream {
     }
 }
 
+pub struct FreeFunctions;
+
 #[derive(Clone)]
 pub struct TokenStreamIter {
     trees: IntoIter<TokenTree>,
@@ -254,6 +307,7 @@ pub struct Rustc {
 }
 
 impl server::Types for Rustc {
+    type FreeFunctions = FreeFunctions;
     type TokenStream = TokenStream;
     type TokenStreamBuilder = TokenStreamBuilder;
     type TokenStreamIter = TokenStreamIter;
@@ -267,6 +321,13 @@ impl server::Types for Rustc {
     type MultiSpan = Vec<Span>;
 }
 
+impl server::FreeFunctions for Rustc {
+    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+        // FIXME: track env var accesses
+        // https://github.com/rust-lang/rust/pull/71858
+    }
+}
+
 impl server::TokenStream for Rustc {
     fn new(&mut self) -> Self::TokenStream {
         Self::TokenStream::new()
@@ -276,6 +337,8 @@ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
         stream.is_empty()
     }
     fn from_str(&mut self, src: &str) -> Self::TokenStream {
+        use std::str::FromStr;
+
         Self::TokenStream::from_str(src).expect("cannot parse string")
     }
     fn to_string(&mut self, stream: &Self::TokenStream) -> String {
@@ -382,10 +445,7 @@ fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
 
 impl server::Group for Rustc {
     fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
-        Self::Group {
-            delimiter: delim_to_internal(delimiter),
-            token_trees: stream.subtree.token_trees,
-        }
+        Self::Group { delimiter: delim_to_internal(delimiter), token_trees: stream.token_trees }
     }
     fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
         delim_to_external(group.delimiter)
@@ -393,13 +453,11 @@ fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
 
     // NOTE: Return value does not include the delimiter
     fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
-        TokenStream {
-            subtree: tt::Subtree { delimiter: None, token_trees: group.token_trees.clone() },
-        }
+        TokenStream { token_trees: group.token_trees.clone() }
     }
 
     fn span(&mut self, group: &Self::Group) -> Self::Span {
-        group.delimiter.map(|it| it.id).unwrap_or_else(|| tt::TokenId::unspecified())
+        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
     }
 
     fn set_span(&mut self, _group: &mut Self::Group, _span: Self::Span) {
@@ -476,8 +534,12 @@ fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
     }
 
     fn integer(&mut self, n: &str) -> Self::Literal {
-        let n: i128 = n.parse().unwrap();
-        Literal { text: n.to_string().into(), id: tt::TokenId::unspecified() }
+        let n = if let Ok(n) = n.parse::<i128>() {
+            n.to_string()
+        } else {
+            n.parse::<u128>().unwrap().to_string()
+        };
+        Literal { text: n.into(), id: tt::TokenId::unspecified() }
     }
 
     fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
@@ -495,8 +557,7 @@ macro_rules! def_suffixed_integer {
             }
         }
 
-        let text =
-            def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128};
+        let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
 
         Literal { text: text.into(), id: tt::TokenId::unspecified() }
     }
@@ -700,5 +761,73 @@ fn test_rustc_server_literals() {
         assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
         assert_eq!(srv.character('c').text, "'c'");
         assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+        // u128::max
+        assert_eq!(
+            srv.integer("340282366920938463463374607431768211455").text,
+            "340282366920938463463374607431768211455"
+        );
+        // i128::min
+        assert_eq!(
+            srv.integer("-170141183460469231731687303715884105728").text,
+            "-170141183460469231731687303715884105728"
+        );
+    }
+
+    #[test]
+    fn test_rustc_server_to_string() {
+        let s = TokenStream {
+            token_trees: vec![
+                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                    text: "struct".into(),
+                    id: tt::TokenId::unspecified(),
+                })),
+                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                    text: "T".into(),
+                    id: tt::TokenId::unspecified(),
+                })),
+                tt::TokenTree::Subtree(tt::Subtree {
+                    delimiter: Some(tt::Delimiter {
+                        id: tt::TokenId::unspecified(),
+                        kind: tt::DelimiterKind::Brace,
+                    }),
+                    token_trees: vec![],
+                }),
+            ],
+        };
+
+        assert_eq!(s.to_string(), "struct T {}");
+    }
+
+    #[test]
+    fn test_rustc_server_from_str() {
+        use std::str::FromStr;
+        let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+            delimiter: Some(tt::Delimiter {
+                id: tt::TokenId::unspecified(),
+                kind: tt::DelimiterKind::Parenthesis,
+            }),
+            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                text: "a".into(),
+                id: tt::TokenId::unspecified(),
+            }))],
+        });
+
+        let t1 = TokenStream::from_str("(a)").unwrap();
+        assert_eq!(t1.token_trees.len(), 1);
+        assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+        let t2 = TokenStream::from_str("(a);").unwrap();
+        assert_eq!(t2.token_trees.len(), 2);
+        assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+        let underscore = TokenStream::from_str("_").unwrap();
+        assert_eq!(
+            underscore.token_trees[0],
+            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                text: "_".into(),
+                id: tt::TokenId::unspecified(),
+            }))
+        );
     }
 }