]> git.lizzy.rs Git - rust.git/blob - src/librustdoc/html/highlight.rs
auto merge of #15421 : catharsis/rust/doc-ffi-minor-fixes, r=alexcrichton
[rust.git] / src / librustdoc / html / highlight.rs
1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! Basic html highlighting functionality
12 //!
13 //! This module uses libsyntax's lexer to provide token-based highlighting for
14 //! the HTML documentation generated by rustdoc.
15
16 use std::str;
17 use std::io;
18
19 use syntax::parse;
20 use syntax::parse::lexer;
21
22 use html::escape::Escape;
23
24 use t = syntax::parse::token;
25
26 /// Highlights some source code, returning the HTML output.
27 pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String {
28     debug!("highlighting: ================\n{}\n==============", src);
29     let sess = parse::new_parse_sess();
30     let fm = parse::string_to_filemap(&sess,
31                                       src.to_string(),
32                                       "<stdin>".to_string());
33
34     let mut out = io::MemWriter::new();
35     doit(&sess,
36          lexer::StringReader::new(&sess.span_diagnostic, fm),
37          class,
38          id,
39          &mut out).unwrap();
40     str::from_utf8_lossy(out.unwrap().as_slice()).to_string()
41 }
42
/// Exhausts the `lexer`, writing the output into `out`.
///
/// The general structure for this method is to iterate over each token,
/// possibly giving it an HTML span with a class specifying what flavor of
/// token it is. All source code emission is done as slices from the source
/// map, not from the tokens themselves, in order to stay true to the original
/// source.
fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
        class: Option<&str>, id: Option<&str>,
        out: &mut Writer) -> io::IoResult<()> {
    use syntax::parse::lexer::Reader;

    // Opening <pre> tag: optional id attribute, then the `rust` class plus
    // whatever extra class the caller supplied.
    try!(write!(out, "<pre "));
    match id {
        Some(id) => try!(write!(out, "id='{}' ", id)),
        None => {}
    }
    try!(write!(out, "class='rust {}'>\n", class.unwrap_or("")));
    // State threaded across tokens:
    // - `is_attribute`: currently inside a `#[...]` attribute; the whole
    //   attribute is wrapped in a single span closed when `]` is seen.
    // - `is_macro`: the previous identifier is followed by `!`, so the `!`
    //   itself is styled as part of the macro invocation.
    // - `is_macro_nonterminal`: a `$` was just emitted, so the next ident is
    //   a macro non-terminal (e.g. `$foo`).
    let mut is_attribute = false;
    let mut is_macro = false;
    let mut is_macro_nonterminal = false;
    loop {
        let next = lexer.next_token();

        // Pull the original source text for a span; used by arms that emit
        // verbatim (escaped) source and `continue` past the classifier below.
        let snip = |sp| sess.span_diagnostic.cm.span_to_snippet(sp).unwrap();

        if next.tok == t::EOF { break }

        // Classify the token into a CSS class name ("" = no highlighting).
        let klass = match next.tok {
            t::WS => {
                try!(write!(out, "{}", Escape(snip(next.sp).as_slice())));
                continue
            },
            t::COMMENT => {
                try!(write!(out, "<span class='comment'>{}</span>",
                            Escape(snip(next.sp).as_slice())));
                continue
            },
            t::SHEBANG(s) => {
                try!(write!(out, "{}", Escape(s.as_str())));
                continue
            },
            // If this '&' token is directly adjacent to another token, assume
            // that it's the address-of operator instead of the and-operator.
            // This allows us to give all pointers their own class (`Box` and
            // `@` are below).
            t::BINOP(t::AND) if lexer.peek().sp.lo == next.sp.hi => "kw-2",
            t::AT | t::TILDE => "kw-2",

            // consider this as part of a macro invocation if there was a
            // leading identifier
            t::NOT if is_macro => { is_macro = false; "macro" }

            // operators
            t::EQ | t::LT | t::LE | t::EQEQ | t::NE | t::GE | t::GT |
                t::ANDAND | t::OROR | t::NOT | t::BINOP(..) | t::RARROW |
                t::BINOPEQ(..) | t::FAT_ARROW => "op",

            // miscellaneous, no highlighting
            t::DOT | t::DOTDOT | t::DOTDOTDOT | t::COMMA | t::SEMI |
                t::COLON | t::MOD_SEP | t::LARROW | t::LPAREN |
                t::RPAREN | t::LBRACKET | t::LBRACE | t::RBRACE | t::QUESTION => "",
            // `$` starts a macro non-terminal only when followed by an ident.
            t::DOLLAR => {
                if t::is_ident(&lexer.peek().tok) {
                    is_macro_nonterminal = true;
                    "macro-nonterminal"
                } else {
                    ""
                }
            }

            // This is the start of an attribute. We're going to want to
            // continue highlighting it as an attribute until the ending ']' is
            // seen, so skip out early. Down below we terminate the attribute
            // span when we see the ']'.
            t::POUND => {
                is_attribute = true;
                try!(write!(out, r"<span class='attribute'>#"));
                continue
            }
            // `]` either closes the pending attribute span or is plain
            // punctuation.
            t::RBRACKET => {
                if is_attribute {
                    is_attribute = false;
                    try!(write!(out, "]</span>"));
                    continue
                } else {
                    ""
                }
            }

            // text literals
            t::LIT_BYTE(..) | t::LIT_BINARY(..) | t::LIT_BINARY_RAW(..) |
                t::LIT_CHAR(..) | t::LIT_STR(..) | t::LIT_STR_RAW(..) => "string",

            // number literals
            t::LIT_INTEGER(..) | t::LIT_FLOAT(..) => "number",

            // keywords are also included in the identifier set, so special
            // identifiers are matched by name first, then keywords, then
            // macro-related state, before falling back to a plain ident.
            t::IDENT(ident, _is_mod_sep) => {
                match t::get_ident(ident).get() {
                    "ref" | "mut" => "kw-2",

                    "self" => "self",
                    "false" | "true" => "boolval",

                    "Option" | "Result" => "prelude-ty",
                    "Some" | "None" | "Ok" | "Err" => "prelude-val",

                    _ if t::is_any_keyword(&next.tok) => "kw",
                    _ => {
                        if is_macro_nonterminal {
                            is_macro_nonterminal = false;
                            "macro-nonterminal"
                        } else if lexer.peek().tok == t::NOT {
                            // An ident directly followed by `!` is a macro
                            // invocation; remember so the `!` matches above.
                            is_macro = true;
                            "macro"
                        } else {
                            "ident"
                        }
                    }
                }
            }

            t::LIFETIME(..) => "lifetime",
            t::DOC_COMMENT(..) => "doccomment",
            t::UNDERSCORE | t::EOF | t::INTERPOLATED(..) => "",
        };

        // as mentioned above, use the original source code instead of
        // stringifying this token
        let snip = sess.span_diagnostic.cm.span_to_snippet(next.sp).unwrap();
        if klass == "" {
            try!(write!(out, "{}", Escape(snip.as_slice())));
        } else {
            try!(write!(out, "<span class='{}'>{}</span>", klass,
                          Escape(snip.as_slice())));
        }
    }

    write!(out, "</pre>\n")
}