]> git.lizzy.rs Git - rust.git/blob - compiler/rustc_session/src/utils.rs
Rollup merge of #84014 - estebank:cool-bears-hot-tip, r=varkor
[rust.git] / compiler / rustc_session / src / utils.rs
1 use crate::parse::ParseSess;
2 use crate::session::Session;
3 use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
4 use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
5 use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
6 use rustc_data_structures::profiling::VerboseTimingGuard;
7 use std::path::{Path, PathBuf};
8
/// Function type that converts a parsed `Nonterminal` back into a `TokenStream`,
/// optionally synthesizing tokens that are missing from the AST.
pub type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;
10
11 impl Session {
12     pub fn timer<'a>(&'a self, what: &'static str) -> VerboseTimingGuard<'a> {
13         self.prof.verbose_generic_activity(what)
14     }
15     pub fn time<R>(&self, what: &'static str, f: impl FnOnce() -> R) -> R {
16         self.prof.verbose_generic_activity(what).run(f)
17     }
18 }
19
/// The kind of a native library, as it affects linking.
///
/// NOTE: variant order is part of the derived `PartialOrd`/`Ord` behavior,
/// so variants must not be reordered.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
pub enum NativeLibKind {
    /// Static library (e.g. `libfoo.a` on Linux or `foo.lib` on Windows/MSVC) included
    /// when linking a final binary, but not when archiving an rlib.
    StaticNoBundle,
    /// Static library (e.g. `libfoo.a` on Linux or `foo.lib` on Windows/MSVC) included
    /// when linking a final binary, but also included when archiving an rlib.
    StaticBundle,
    /// Dynamic library (e.g. `libfoo.so` on Linux)
    /// or an import library corresponding to a dynamic library (e.g. `foo.lib` on Windows/MSVC).
    Dylib,
    /// Dynamic library (e.g. `foo.dll` on Windows) without a corresponding import library.
    RawDylib,
    /// A macOS-specific kind of dynamic libraries.
    Framework,
    /// The library kind wasn't specified, `Dylib` is currently used as a default.
    Unspecified,
}
38
// `NativeLibKind` derives `Hash`, so its stable-hash implementation can simply
// defer to that derived impl.
rustc_data_structures::impl_stable_hash_via_hash!(NativeLibKind);
40
/// A path paired with its canonical form.
///
/// Canonicalization can fail (for instance, when the path does not exist),
/// so the canonical form is stored as an `Option` and the original path is
/// kept around as the fallback.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct CanonicalizedPath {
    // `None` when `std::fs::canonicalize` failed for `original`.
    canonicalized: Option<PathBuf>,
    original: PathBuf,
}

impl CanonicalizedPath {
    /// Builds a `CanonicalizedPath` from `path`, attempting canonicalization.
    pub fn new(path: &Path) -> Self {
        let original = path.to_owned();
        let canonicalized = std::fs::canonicalize(path).ok();
        Self { canonicalized, original }
    }

    /// The canonical form, or the original path when canonicalization failed.
    pub fn canonicalized(&self) -> &PathBuf {
        match &self.canonicalized {
            Some(path) => path,
            None => self.original(),
        }
    }

    /// The path exactly as it was given, before canonicalization.
    pub fn original(&self) -> &PathBuf {
        &self.original
    }
}
62
// FIXME: Find a better spot for this - it needs to be accessible from `rustc_ast_lowering`,
// and needs to access `ParseSess`.
/// Rewrites token streams by expanding interpolated (`Nonterminal`) tokens
/// into plain token streams.
pub struct FlattenNonterminals<'a> {
    /// Parse session handed to `nt_to_tokenstream` when expanding a nonterminal.
    pub parse_sess: &'a ParseSess,
    /// Whether missing tokens may be synthesized during the conversion.
    pub synthesize_tokens: CanSynthesizeMissingTokens,
    /// Callback that converts a `Nonterminal` into a `TokenStream`.
    pub nt_to_tokenstream: NtToTokenstream,
}
70
71 impl<'a> FlattenNonterminals<'a> {
72     pub fn process_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
73         fn can_skip(stream: &TokenStream) -> bool {
74             stream.trees().all(|tree| match tree {
75                 TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
76                 TokenTree::Delimited(_, _, inner) => can_skip(&inner),
77             })
78         }
79
80         if can_skip(&tokens) {
81             return tokens;
82         }
83
84         tokens.into_trees().flat_map(|tree| self.process_token_tree(tree).into_trees()).collect()
85     }
86
87     pub fn process_token_tree(&mut self, tree: TokenTree) -> TokenStream {
88         match tree {
89             TokenTree::Token(token) => self.process_token(token),
90             TokenTree::Delimited(span, delim, tts) => {
91                 TokenTree::Delimited(span, delim, self.process_token_stream(tts)).into()
92             }
93         }
94     }
95
96     pub fn process_token(&mut self, token: Token) -> TokenStream {
97         match token.kind {
98             token::Interpolated(nt) => {
99                 let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
100                 TokenTree::Delimited(
101                     DelimSpan::from_single(token.span),
102                     DelimToken::NoDelim,
103                     self.process_token_stream(tts),
104                 )
105                 .into()
106             }
107             _ => TokenTree::Token(token).into(),
108         }
109     }
110 }