use syntax::codemap::{dummy_spanned, Spanned};
use syntax::feature_gate::UnstableFeatures;
use syntax::ptr::P;
-use syntax::symbol::keywords;
+use syntax::symbol::keywords::{self, Keyword};
use syntax::symbol::{Symbol, InternedString};
use syntax_pos::{self, DUMMY_SP, Pos, FileName};
use std::iter::{FromIterator, once};
use rustc_data_structures::sync::Lrc;
use std::rc::Rc;
+use std::str::FromStr;
use std::cell::RefCell;
use std::sync::Arc;
use std::u32;
_ => unreachable!(),
}
- let ExternalCrate { name, src, primitives, .. } = LOCAL_CRATE.clean(cx);
+ let ExternalCrate { name, src, primitives, keywords, .. } = LOCAL_CRATE.clean(cx);
{
let m = match module.inner {
ModuleItem(ref mut m) => m,
inner: PrimitiveItem(prim),
}
}));
+ m.items.extend(keywords.into_iter().map(|(def_id, kw, attrs)| {
+ Item {
+ source: Span::empty(),
+ name: Some(kw.clone()),
+ attrs: attrs,
+ visibility: Some(Public),
+ stability: get_stability(cx, def_id),
+ deprecation: get_deprecation(cx, def_id),
+ def_id,
+ inner: KeywordItem(kw),
+ }
+ }));
}
let mut access_levels = cx.access_levels.borrow_mut();
pub src: FileName,
pub attrs: Attributes,
pub primitives: Vec<(DefId, PrimitiveType, Attributes)>,
+ pub keywords: Vec<(DefId, String, Attributes)>,
}
impl Clean<ExternalCrate> for CrateNum {
.filter_map(as_primitive).collect()
};
+ // Maps a `Def` to `(DefId, keyword name, Attributes)` when it is a module
+ // carrying a `#[doc(keyword = "...")]` attribute; yields `None` for any
+ // other def or when no valid keyword attribute is present.
+ let as_keyword = |def: Def| {
+ if let Def::Mod(def_id) = def {
+ let attrs = cx.tcx.get_attrs(def_id).clean(cx);
+ let mut keyword = None;
+ for attr in attrs.lists("doc") {
+ if let Some(v) = attr.value_str() {
+ if attr.check_name("keyword") {
+ // Only values that parse as an actual language keyword are
+ // accepted; anything else is silently dropped (see FIXME).
+ keyword = Keyword::from_str(&v.as_str()).ok()
+ .map(|x| x.name().to_string());
+ if keyword.is_some() {
+ break
+ }
+ // FIXME: should warn on unknown keywords?
+ }
+ }
+ }
+ return keyword.map(|p| (def_id, p, attrs));
+ }
+ None
+ };
+ // Collect the crate's keyword-documentation modules. For the local crate we
+ // walk the HIR of the root module directly (also following public
+ // single-item `use` re-exports); for external crates we query the
+ // crate root's children from the metadata instead.
+ let keywords = if root.is_local() {
+ cx.tcx.hir.krate().module.item_ids.iter().filter_map(|&id| {
+ let item = cx.tcx.hir.expect_item(id.id);
+ match item.node {
+ hir::ItemMod(_) => {
+ as_keyword(Def::Mod(cx.tcx.hir.local_def_id(id.id)))
+ }
+ hir::ItemUse(ref path, hir::UseKind::Single)
+ if item.vis == hir::Visibility::Public => {
+ // NOTE(review): the binding is named `prim` (copied from the
+ // primitives code above?) but holds a keyword name — consider
+ // renaming to `kw` for clarity.
+ as_keyword(path.def).map(|(_, prim, attrs)| {
+ // Attribute the keyword to the local re-export, not the target.
+ (cx.tcx.hir.local_def_id(id.id), prim, attrs)
+ })
+ }
+ _ => None
+ }
+ }).collect()
+ } else {
+ cx.tcx.item_children(root).iter().map(|item| item.def)
+ .filter_map(as_keyword).collect()
+ };
+
ExternalCrate {
name: cx.tcx.crate_name(*self).to_string(),
src: krate_src,
attrs: cx.tcx.get_attrs(root).clean(cx),
primitives,
+ keywords,
}
}
}
/// Returns `true` if this item is an `extern crate` entry.
pub fn is_extern_crate(&self) -> bool {
    match self.type_() {
        ItemType::ExternCrate => true,
        _ => false,
    }
}
+ /// Returns `true` if this item is a keyword-documentation entry
+ /// (a module tagged with `#[doc(keyword = "...")]`).
+ pub fn is_keyword(&self) -> bool {
+ self.type_() == ItemType::Keyword
+ }
pub fn is_stripped(&self) -> bool {
match self.inner { StrippedItem(..) => true, _ => false }
AssociatedTypeItem(Vec<TyParamBound>, Option<Type>),
/// An item that has been stripped by a rustdoc pass
StrippedItem(Box<ItemEnum>),
+ KeywordItem(String),
}
impl ItemEnum {
link_range: Option<Range<usize>>,
) {
let sp = span_of_attrs(attrs);
- let mut diag = cx.sess()
- .struct_span_warn(sp, &format!("[{}] cannot be resolved, ignoring it...", path_str));
+ let msg = format!("`[{}]` cannot be resolved, ignoring it...", path_str);
- if let Some(link_range) = link_range {
+ let code_dox = sp.to_src(cx);
+
+ let doc_comment_padding = 3;
+ let mut diag = if let Some(link_range) = link_range {
// blah blah blah\nblah\nblah [blah] blah blah\nblah blah
// ^ ~~~~~~
// | link_range
// last_new_line_offset
- let last_new_line_offset = dox[..link_range.start].rfind('\n').map_or(0, |n| n + 1);
- let line = dox[last_new_line_offset..].lines().next().unwrap_or("");
-
- // Print the line containing the `link_range` and manually mark it with '^'s
- diag.note(&format!(
- "the link appears in this line:\n\n{line}\n{indicator: <before$}{indicator:^<found$}",
- line=line,
- indicator="",
- before=link_range.start - last_new_line_offset,
- found=link_range.len(),
- ));
- } else {
+ let mut diag;
+ if dox.lines().count() == code_dox.lines().count() {
+ let line_offset = dox[..link_range.start].lines().count();
+ // The span starts in the `///`, so we don't have to account for the leading whitespace
+ let code_dox_len = if line_offset <= 1 {
+ doc_comment_padding
+ } else {
+ // The first `///`
+ doc_comment_padding +
+ // Each subsequent leading whitespace and `///`
+ code_dox.lines().skip(1).take(line_offset - 1).fold(0, |sum, line| {
+ sum + doc_comment_padding + line.len() - line.trim().len()
+ })
+ };
- }
+ // Extract the specific span
+ let sp = sp.from_inner_byte_pos(
+ link_range.start + code_dox_len,
+ link_range.end + code_dox_len,
+ );
+ diag = cx.sess().struct_span_warn(sp, &msg);
+ diag.span_label(sp, "cannot be resolved, ignoring");
+ } else {
+ diag = cx.sess().struct_span_warn(sp, &msg);
+
+ let last_new_line_offset = dox[..link_range.start].rfind('\n').map_or(0, |n| n + 1);
+ let line = dox[last_new_line_offset..].lines().next().unwrap_or("");
+
+ // Print the line containing the `link_range` and manually mark it with '^'s
+ diag.note(&format!(
+ "the link appears in this line:\n\n{line}\n\
+ {indicator: <before$}{indicator:^<found$}",
+ line=line,
+ indicator="",
+ before=link_range.start - last_new_line_offset,
+ found=link_range.len(),
+ ));
+ }
+ diag
+ } else {
+ cx.sess().struct_span_warn(sp, &msg)
+ };
diag.emit();
}