1 //! Basic syntax highlighting functionality.
3 //! This module uses libsyntax's lexer to provide token-based highlighting for
4 //! the HTML documentation generated by rustdoc.
6 //! Use the `render_with_highlighting` function to highlight some rust code.
8 use crate::html::escape::Escape;
10 use std::fmt::Display;
12 use std::io::prelude::*;
14 use syntax::source_map::{SourceMap, FilePathMapping};
15 use syntax::parse::lexer;
16 use syntax::parse::token::{self, Token};
18 use syntax::symbol::{kw, sym};
19 use syntax_pos::{Span, FileName};
21 /// Highlights `src`, returning the HTML output.
// NOTE(review): this excerpt is elided (gaps in the embedded line numbers); the
// `src` and `class` parameters and several closing braces are not visible here.
22 pub fn render_with_highlighting(
25 extension: Option<&str>,
26 tooltip: Option<(&str, &str)>,
28 debug!("highlighting: ================\n{}\n==============", src);
29 let mut out = Vec::new();
// If a tooltip was requested, emit its wrapper markup before the code block.
30 if let Some((tooltip, class)) = tooltip {
31 write!(out, "<div class='information'><div class='tooltip {}'>ⓘ<span \
32 class='tooltiptext'>{}</span></div></div>",
33 class, tooltip).unwrap();
// Build a throwaway parse session and virtual source file so the lexer can
// run over `src` without touching the real source map.
36 let sess = parse::ParseSess::new(FilePathMapping::empty());
37 let fm = sess.source_map().new_source_file(
38 FileName::Custom(String::from("rustdoc-highlighting")),
// Lex and classify the source. On a classification failure, surface the
// lexer's buffered fatal errors instead of a highlighted string.
41 let highlight_result =
42 lexer::StringReader::new_or_buffered_errs(&sess, fm, None).and_then(|lexer| {
43 let mut classifier = Classifier::new(lexer, sess.source_map());
45 let mut highlighted_source = vec![];
46 if classifier.write_source(&mut highlighted_source).is_err() {
47 Err(classifier.lexer.buffer_fatal_errors())
49 Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
// Success: wrap the highlighted HTML in the standard header/footer markup.
53 match highlight_result {
54 Ok(highlighted_source) => {
55 write_header(class, &mut out).unwrap();
56 write!(out, "{}", highlighted_source).unwrap();
57 if let Some(extension) = extension {
58 write!(out, "{}", extension).unwrap();
60 write_footer(&mut out).unwrap();
63 // If errors are encountered while trying to highlight, cancel the errors and just emit
64 // the unhighlighted source. The errors will have already been reported in the
65 // `check-code-block-syntax` pass.
66 for mut error in errors {
70 write!(out, "<pre><code>{}</code></pre>", src).unwrap();
74 String::from_utf8_lossy(&out[..]).into_owned()
77 /// Processes a program (nested in the internal `lexer`), classifying strings of
78 /// text by highlighting category (`Class`). Calls out to a `Writer` to write
79 /// each span of text in sequence.
80 struct Classifier<'a> {
81 lexer: lexer::StringReader<'a>,
82 source_map: &'a SourceMap,
84 // State of the classifier.
// NOTE(review): further state fields are elided from this excerpt; `write_token`
// below also reads/writes `self.in_macro` and `self.in_attribute`.
// Set while inside a `$ident` macro nonterminal so the identifier token that
// follows the `$` gets the macro-nonterminal class.
87 in_macro_nonterminal: bool,
90 /// How a span of text is classified. Mostly corresponds to token kinds.
91 #[derive(Clone, Copy, Debug, Eq, PartialEq)]
// NOTE(review): the enum header and most variants are elided from this excerpt;
// the variant set can be read off `rustdoc_class` below (Comment, DocComment,
// Attribute, KeyWord, RefKeyWord, Self_, Macro, MacroNonTerminal, String,
// Number, Bool, Ident, Lifetime, PreludeTy, PreludeVal, QuestionMark, None).
98 // Keywords that do pointer/reference stuff.
114 /// Trait that controls writing the output of syntax highlighting. Users should
115 /// implement this trait to customize writing output.
117 /// The classifier will call into the `Writer` implementation as it finds spans
118 /// of text to highlight. Exactly how that text should be highlighted is up to
119 /// the implementation.
// NOTE(review): the `trait Writer { ... }` header line is elided in this excerpt.
121 /// Called when we start processing a span of text that should be highlighted.
122 /// The `Class` argument specifies how it should be highlighted.
123 fn enter_span(&mut self, _: Class) -> io::Result<()>;
125 /// Called at the end of a span of highlighted text.
126 fn exit_span(&mut self) -> io::Result<()>;
128 /// Called for a span of text. If the text should be highlighted differently from the
129 /// surrounding text, then the `Class` argument will be a value other than `None`.
131 /// The following sequences of callbacks are equivalent:
133 /// enter_span(Foo), string("text", None), exit_span()
134 /// string("text", Foo)
136 /// The latter can be thought of as a shorthand for the former, which is
// NOTE(review): the remainder of this method signature is elided.
138 fn string<T: Display>(&mut self,
144 // Implement `Writer` for anything that can be written to; this just implements
145 // the default rustdoc behaviour.
146 impl<U: Write> Writer for U {
147 fn string<T: Display>(&mut self,
// `Class::None` text is written bare; any other class wraps the text in a
// <span> carrying the corresponding rustdoc CSS class.
152 Class::None => write!(self, "{}", text),
153 klass => write!(self, "<span class=\"{}\">{}</span>", klass.rustdoc_class(), text),
// Open a styled <span>; `exit_span` below emits the matching close tag.
157 fn enter_span(&mut self, klass: Class) -> io::Result<()> {
158 write!(self, "<span class=\"{}\">", klass.rustdoc_class())
161 fn exit_span(&mut self) -> io::Result<()> {
162 write!(self, "</span>")
// Errors that can abort highlighting: lexing failures and I/O failures while
// writing output.
166 enum HighlightError {
// NOTE(review): the variants are elided here; `LexError` is constructed in
// `try_next_token` and `IoError(io::Error)` in the `From` impl below.
171 impl From<io::Error> for HighlightError {
172 fn from(err: io::Error) -> Self {
173 HighlightError::IoError(err)
177 impl<'a> Classifier<'a> {
// Constructs a classifier over `lexer`; all state flags start cleared.
178 fn new(lexer: lexer::StringReader<'a>, source_map: &'a SourceMap) -> Classifier<'a> {
184 in_macro_nonterminal: false,
188 /// Gets the next token out of the lexer.
189 fn try_next_token(&mut self) -> Result<Token, HighlightError> {
// Any lexer error is collapsed to `HighlightError::LexError`; the detailed
// diagnostics stay buffered in the lexer (drained by the caller in
// `render_with_highlighting` via `buffer_fatal_errors`).
190 match self.lexer.try_next_token() {
191 Ok(token) => Ok(token),
192 Err(_) => Err(HighlightError::LexError),
196 /// Exhausts the `lexer` writing the output into `out`.
198 /// The general structure for this method is to iterate over each token,
199 /// possibly giving it an HTML span with a class specifying what flavor of token
200 /// is used. All source code emission is done as slices from the source map,
201 /// not from the tokens themselves, in order to stay true to the original
203 fn write_source<W: Writer>(&mut self,
205 -> Result<(), HighlightError> {
// Pull tokens until EOF, delegating each one to `write_token`.
// NOTE(review): the enclosing loop construct is elided in this excerpt.
207 let next = self.try_next_token()?;
208 if next == token::Eof {
212 self.write_token(out, next)?;
218 // Handles an individual token from the lexer.
219 fn write_token<W: Writer>(&mut self,
222 -> Result<(), HighlightError> {
// Compute the highlighting class for this token; arms that emit their own
// output return early instead of falling through to the final `out.string`.
223 let klass = match token.kind {
// Shebang lines are emitted escaped but unhighlighted.
224 token::Shebang(s) => {
225 out.string(Escape(&s.as_str()), Class::None)?;
229 token::Whitespace => Class::None,
230 token::Comment => Class::Comment,
231 token::DocComment(..) => Class::DocComment,
233 // If this '&' or '*' token is followed by a non-whitespace token, assume that it's the
234 // reference or dereference operator or a reference or pointer type, instead of the
235 // bit-and or multiplication operator.
236 token::BinOp(token::And) | token::BinOp(token::Star)
237 if self.lexer.peek() != token::Whitespace => Class::RefKeyWord,
239 // Consider this as part of a macro invocation if there was a
240 // leading identifier.
241 token::Not if self.in_macro => {
242 self.in_macro = false;
// Operator tokens.
247 token::Eq | token::Lt | token::Le | token::EqEq | token::Ne | token::Ge | token::Gt |
248 token::AndAnd | token::OrOr | token::Not | token::BinOp(..) | token::RArrow |
249 token::BinOpEq(..) | token::FatArrow => Class::Op,
251 // Miscellaneous, no highlighting.
252 token::Dot | token::DotDot | token::DotDotDot | token::DotDotEq | token::Comma |
253 token::Semi | token::Colon | token::ModSep | token::LArrow | token::OpenDelim(_) |
254 token::CloseDelim(token::Brace) | token::CloseDelim(token::Paren) |
255 token::CloseDelim(token::NoDelim) => Class::None,
257 token::Question => Class::QuestionMark,
// NOTE(review): the match-arm pattern for this branch is elided; a following
// identifier marks the start of a macro nonterminal (e.g. `$ident`).
260 if self.lexer.peek().kind.is_ident() {
261 self.in_macro_nonterminal = true;
262 Class::MacroNonTerminal
268 // This might be the start of an attribute. We're going to want to
269 // continue highlighting it as an attribute until the ending ']' is
270 // seen, so skip out early. Down below we terminate the attribute
271 // span when we see the ']'.
273 // We can't be sure that our # begins an attribute (it could
274 // just be appearing in a macro) until we read either `#![` or
275 // `#[` from the input stream.
277 // We don't want to start highlighting as an attribute until
278 // we're confident there is going to be a ] coming up, as
279 // otherwise # tokens in macros highlight the rest of the input
282 // Case 1: #![inner_attribute]
283 if self.lexer.peek() == token::Not {
284 self.try_next_token()?; // NOTE: consumes `!` token!
285 if self.lexer.peek() == token::OpenDelim(token::Bracket) {
286 self.in_attribute = true;
287 out.enter_span(Class::Attribute)?;
289 out.string("#", Class::None)?;
290 out.string("!", Class::None)?;
294 // Case 2: #[outer_attribute]
295 if self.lexer.peek() == token::OpenDelim(token::Bracket) {
296 self.in_attribute = true;
297 out.enter_span(Class::Attribute)?;
299 out.string("#", Class::None)?;
// A `]` closes an in-progress attribute span; otherwise it is unhighlighted.
302 token::CloseDelim(token::Bracket) => {
303 if self.in_attribute {
304 self.in_attribute = false;
305 out.string("]", Class::None)?;
// Literals: string-like kinds, numeric kinds; `Lit::Bool` never reaches here
// (booleans lex as identifiers), hence the panic.
313 token::Literal(lit) => {
316 token::Byte | token::Char | token::Err |
317 token::ByteStr | token::ByteStrRaw(..) |
318 token::Str | token::StrRaw(..) => Class::String,
321 token::Integer | token::Float => Class::Number,
323 token::Bool => panic!("literal token contains `Lit::Bool`"),
327 // Keywords are also included in the identifier set.
328 token::Ident(ident, is_raw) => {
330 kw::Ref | kw::Mut if !is_raw => Class::RefKeyWord,
332 kw::SelfLower | kw::SelfUpper => Class::Self_,
333 kw::False | kw::True if !is_raw => Class::Bool,
// Well-known prelude types and values get their own classes.
335 sym::Option | sym::Result => Class::PreludeTy,
336 sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,
338 _ if token.is_reserved_ident() => Class::KeyWord,
// Plain identifier: may complete a `$ident` nonterminal, or — if a `!`
// follows — begin a macro invocation.
341 if self.in_macro_nonterminal {
342 self.in_macro_nonterminal = false;
343 Class::MacroNonTerminal
344 } else if self.lexer.peek() == token::Not {
345 self.in_macro = true;
354 token::Lifetime(..) => Class::Lifetime,
356 token::Eof | token::Interpolated(..) |
357 token::Tilde | token::At| token::SingleQuote => Class::None,
360 // Anything that didn't return above is the simple case where the
361 // class just spans a single token, so we can use the `string` method.
362 out.string(Escape(&self.snip(token.span)), klass)?;
367 // Helper function to get a snippet from the source_map.
// Panics if the span cannot be turned back into a snippet; spans here come
// straight from the lexer over an in-memory file, so lookup is expected to succeed.
368 fn snip(&self, sp: Span) -> String {
369 self.source_map.span_to_snippet(sp).unwrap()
374 /// Returns the css class expected by rustdoc for each `Class`.
// NOTE(review): the enclosing `impl Class` header and match header are elided
// in this excerpt.
375 fn rustdoc_class(self) -> &'static str {
378 Class::Comment => "comment",
379 Class::DocComment => "doccomment",
380 Class::Attribute => "attribute",
381 Class::KeyWord => "kw",
382 Class::RefKeyWord => "kw-2",
383 Class::Self_ => "self",
385 Class::Macro => "macro",
386 Class::MacroNonTerminal => "macro-nonterminal",
387 Class::String => "string",
388 Class::Number => "number",
389 Class::Bool => "bool-val",
390 Class::Ident => "ident",
391 Class::Lifetime => "lifetime",
392 Class::PreludeTy => "prelude-ty",
393 Class::PreludeVal => "prelude-val",
394 Class::QuestionMark => "question-mark"
// Opens the wrapper <div> and <pre> around highlighted output; `class`
// augments the base "rust" CSS class when present.
399 fn write_header(class: Option<&str>, out: &mut dyn Write) -> io::Result<()> {
400 write!(out, "<div class=\"example-wrap\"><pre class=\"rust {}\">\n", class.unwrap_or(""))
// Closes the <pre> and wrapper <div> opened by `write_header`.
403 fn write_footer(out: &mut dyn Write) -> io::Result<()> {
404 write!(out, "</pre></div>\n")