compiler/rustc_session/src/utils.rs
use crate::parse::ParseSess;
use crate::session::Session;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_data_structures::profiling::VerboseTimingGuard;
use std::path::{Path, PathBuf};

pub type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;

impl Session {
    pub fn timer<'a>(&'a self, what: &'static str) -> VerboseTimingGuard<'a> {
        self.prof.verbose_generic_activity(what)
    }
    pub fn time<R>(&self, what: &'static str, f: impl FnOnce() -> R) -> R {
        self.prof.verbose_generic_activity(what).run(f)
    }
}
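
// Usage sketch (hypothetical call site; assumes a `Session` value `sess` is in scope
// and `run_typeck` is an illustrative placeholder):
//
//     let _timer = sess.timer("codegen");                 // guard stops timing when dropped
//     let result = sess.time("typeck", || run_typeck());  // times the closure and returns its result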

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
pub enum NativeLibKind {
    /// Static library (e.g. `libfoo.a` on Linux or `foo.lib` on Windows/MSVC)
    Static {
        /// Whether to bundle objects from static library into produced rlib
        bundle: Option<bool>,
        /// Whether to link static library without throwing any object files away
        whole_archive: Option<bool>,
    },
    /// Dynamic library (e.g. `libfoo.so` on Linux)
    /// or an import library corresponding to a dynamic library (e.g. `foo.lib` on Windows/MSVC).
    Dylib {
        /// Whether the dynamic library will be linked only if it satisfies some undefined symbols
        as_needed: Option<bool>,
    },
    /// Dynamic library (e.g. `foo.dll` on Windows) without a corresponding import library.
    RawDylib,
    /// A macOS-specific kind of dynamic library.
    Framework {
        /// Whether the framework will be linked only if it satisfies some undefined symbols
        as_needed: Option<bool>,
    },
    /// The library kind wasn't specified; `Dylib` is currently used as the default.
    Unspecified,
}
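
// Sketch of how these kinds typically surface to users (illustrative only; exact
// modifier names and flag syntax depend on the rustc version):
//
//     #[link(name = "foo", kind = "static", modifiers = "+whole-archive")]
//     extern "C" { fn foo(); }
//
//     rustc main.rs -l dylib:+as-needed=bar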

rustc_data_structures::impl_stable_hash_via_hash!(NativeLibKind);

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
pub struct NativeLib {
    pub name: String,
    pub new_name: Option<String>,
    pub kind: NativeLibKind,
    pub verbatim: Option<bool>,
}

rustc_data_structures::impl_stable_hash_via_hash!(NativeLib);

/// A path that has been canonicalized along with its original, non-canonicalized form.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct CanonicalizedPath {
    // Optional since canonicalization can sometimes fail
    canonicalized: Option<PathBuf>,
    original: PathBuf,
}

impl CanonicalizedPath {
    pub fn new(path: &Path) -> Self {
        Self { original: path.to_owned(), canonicalized: std::fs::canonicalize(path).ok() }
    }

    pub fn canonicalized(&self) -> &PathBuf {
        self.canonicalized.as_ref().unwrap_or(self.original())
    }

    pub fn original(&self) -> &PathBuf {
        &self.original
    }
}
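
// Usage sketch: canonicalization can fail (e.g. for a nonexistent path), in which
// case `canonicalized()` falls back to the original, non-canonicalized path:
//
//     let p = CanonicalizedPath::new(Path::new("../does-not-exist"));
//     assert_eq!(p.canonicalized(), p.original());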

// FIXME: Find a better spot for this - it needs to be accessible from `rustc_ast_lowering`,
// and needs to access `ParseSess`.
/// Flattens `Interpolated` (nonterminal) tokens in a token stream by expanding each
/// nonterminal back into plain tokens via `nt_to_tokenstream`.
pub struct FlattenNonterminals<'a> {
    pub parse_sess: &'a ParseSess,
    pub synthesize_tokens: CanSynthesizeMissingTokens,
    pub nt_to_tokenstream: NtToTokenstream,
}

impl<'a> FlattenNonterminals<'a> {
    pub fn process_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
        // Fast path: the stream can be returned untouched if it contains no interpolated tokens.
        fn can_skip(stream: &TokenStream) -> bool {
            stream.trees().all(|tree| match tree {
                TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
                TokenTree::Delimited(_, _, inner) => can_skip(&inner),
            })
        }

        if can_skip(&tokens) {
            return tokens;
        }

        tokens.into_trees().flat_map(|tree| self.process_token_tree(tree).into_trees()).collect()
    }

    pub fn process_token_tree(&mut self, tree: TokenTree) -> TokenStream {
        match tree {
            TokenTree::Token(token) => self.process_token(token),
            TokenTree::Delimited(span, delim, tts) => {
                TokenTree::Delimited(span, delim, self.process_token_stream(tts)).into()
            }
        }
    }

    pub fn process_token(&mut self, token: Token) -> TokenStream {
        match token.kind {
            // Expand the nonterminal, flatten the result recursively, and wrap it in an
            // invisible (`NoDelim`) delimited group spanning the original token.
            token::Interpolated(nt) => {
                let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
                TokenTree::Delimited(
                    DelimSpan::from_single(token.span),
                    DelimToken::NoDelim,
                    self.process_token_stream(tts),
                )
                .into()
            }
            _ => TokenTree::Token(token).into(),
        }
    }
}
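
// Usage sketch (hypothetical call site, e.g. during AST lowering; the field values
// shown are assumptions for illustration):
//
//     let mut flatten = FlattenNonterminals {
//         parse_sess: &sess.parse_sess,
//         synthesize_tokens: CanSynthesizeMissingTokens::Yes,
//         nt_to_tokenstream,
//     };
//     let flat = flatten.process_token_stream(tokens);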