X-Git-Url: https://git.lizzy.rs/?a=blobdiff_plain;f=clippy_lints%2Fsrc%2Fdoc.rs;h=133a20b669a17720faab96f0cb22031939e01f2a;hb=0ee393cf01b59d55a54e9e4299797bd0f7339d58;hp=4950212a4b5d145f2e5c41ebd6413a3e8c76b2e6;hpb=f5db24a59fa34f93cfc1369df8f13384936170bf;p=rust.git diff --git a/clippy_lints/src/doc.rs b/clippy_lints/src/doc.rs index 4950212a4b5..133a20b669a 100644 --- a/clippy_lints/src/doc.rs +++ b/clippy_lints/src/doc.rs @@ -1,13 +1,15 @@ -use crate::utils::span_lint; +use crate::utils::{get_trait_def_id, implements_trait, is_entrypoint_fn, match_type, paths, return_ty, span_lint}; use itertools::Itertools; -use pulldown_cmark; -use rustc::lint::{EarlyContext, EarlyLintPass, LintArray, LintPass}; -use rustc::{declare_tool_lint, impl_lint_pass}; +use rustc::lint::in_external_macro; +use rustc::ty::TyKind; use rustc_data_structures::fx::FxHashSet; +use rustc_hir as hir; +use rustc_lint::{LateContext, LateLintPass}; +use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::source_map::{BytePos, MultiSpan, Span}; +use rustc_span::Pos; use std::ops::Range; -use syntax::ast; -use syntax::source_map::{BytePos, Span}; -use syntax_pos::Pos; +use syntax::ast::{AttrKind, Attribute}; use url::Url; declare_clippy_lint! { @@ -34,27 +36,216 @@ "presence of `_`, `::` or camel-case outside backticks in documentation" } +declare_clippy_lint! { + /// **What it does:** Checks for the doc comments of publicly visible + /// unsafe functions and warns if there is no `# Safety` section. + /// + /// **Why is this bad?** Unsafe functions should document their safety + /// preconditions, so that users can be sure they are using them safely. + /// + /// **Known problems:** None. + /// + /// **Examples:** + /// ```rust + ///# type Universe = (); + /// /// This function should really be documented + /// pub unsafe fn start_apocalypse(u: &mut Universe) { + /// unimplemented!(); + /// } + /// ``` + /// + /// At least write a line about safety: + /// + /// ```rust + ///# type Universe = (); + /// /// # Safety + /// /// + /// /// This function should not be called before the horsemen are ready. + /// pub unsafe fn start_apocalypse(u: &mut Universe) { + /// unimplemented!(); + /// } + /// ``` + pub MISSING_SAFETY_DOC, + style, + "`pub unsafe fn` without `# Safety` docs" +} + +declare_clippy_lint! { + /// **What it does:** Checks the doc comments of publicly visible functions that + /// return a `Result` type and warns if there is no `# Errors` section. + /// + /// **Why is this bad?** Documenting the type of errors that can be returned from a + /// function can help callers write code to handle the errors appropriately. + /// + /// **Known problems:** None. + /// + /// **Examples:** + /// + /// Since the following function returns a `Result` it has an `# Errors` section in + /// its doc comment: + /// + /// ```rust + ///# use std::io; + /// /// # Errors + /// /// + /// /// Will return `Err` if `filename` does not exist or the user does not have + /// /// permission to read it. + /// pub fn read(filename: String) -> io::Result { + /// unimplemented!(); + /// } + /// ``` + pub MISSING_ERRORS_DOC, + pedantic, + "`pub fn` returns `Result` without `# Errors` in doc comment" +} + +declare_clippy_lint! { + /// **What it does:** Checks for `fn main() { .. }` in doctests + /// + /// **Why is this bad?** The test can be shorter (and likely more readable) + /// if the `fn main()` is left implicit. + /// + /// **Known problems:** None. 
+ /// + /// **Examples:** + /// ``````rust + /// /// An example of a doctest with a `main()` function + /// /// + /// /// # Examples + /// /// + /// /// ``` + /// /// fn main() { + /// /// // this needs not be in an `fn` + /// /// } + /// /// ``` + /// fn needless_main() { + /// unimplemented!(); + /// } + /// `````` + pub NEEDLESS_DOCTEST_MAIN, + style, + "presence of `fn main() {` in code examples" +} + #[allow(clippy::module_name_repetitions)] #[derive(Clone)] pub struct DocMarkdown { valid_idents: FxHashSet, + in_trait_impl: bool, } impl DocMarkdown { pub fn new(valid_idents: FxHashSet) -> Self { - Self { valid_idents } + Self { + valid_idents, + in_trait_impl: false, + } } } -impl_lint_pass!(DocMarkdown => [DOC_MARKDOWN]); +impl_lint_pass!(DocMarkdown => [DOC_MARKDOWN, MISSING_SAFETY_DOC, MISSING_ERRORS_DOC, NEEDLESS_DOCTEST_MAIN]); -impl EarlyLintPass for DocMarkdown { - fn check_crate(&mut self, cx: &EarlyContext<'_>, krate: &ast::Crate) { +impl<'a, 'tcx> LateLintPass<'a, 'tcx> for DocMarkdown { + fn check_crate(&mut self, cx: &LateContext<'a, 'tcx>, krate: &'tcx hir::Crate<'_>) { check_attrs(cx, &self.valid_idents, &krate.attrs); } - fn check_item(&mut self, cx: &EarlyContext<'_>, item: &ast::Item) { - check_attrs(cx, &self.valid_idents, &item.attrs); + fn check_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx hir::Item<'_>) { + let headers = check_attrs(cx, &self.valid_idents, &item.attrs); + match item.kind { + hir::ItemKind::Fn(ref sig, ..) => { + if !(is_entrypoint_fn(cx, cx.tcx.hir().local_def_id(item.hir_id)) + || in_external_macro(cx.tcx.sess, item.span)) + { + lint_for_missing_headers(cx, item.hir_id, item.span, sig, headers); + } + }, + hir::ItemKind::Impl { + of_trait: ref trait_ref, + .. + } => { + self.in_trait_impl = trait_ref.is_some(); + }, + _ => {}, + } + } + + fn check_item_post(&mut self, _cx: &LateContext<'a, 'tcx>, item: &'tcx hir::Item<'_>) { + if let hir::ItemKind::Impl { .. } = item.kind { + self.in_trait_impl = false; + } + } + + fn check_trait_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx hir::TraitItem<'_>) { + let headers = check_attrs(cx, &self.valid_idents, &item.attrs); + if let hir::TraitItemKind::Method(ref sig, ..) = item.kind { + if !in_external_macro(cx.tcx.sess, item.span) { + lint_for_missing_headers(cx, item.hir_id, item.span, sig, headers); + } + } + } + + fn check_impl_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx hir::ImplItem<'_>) { + let headers = check_attrs(cx, &self.valid_idents, &item.attrs); + if self.in_trait_impl || in_external_macro(cx.tcx.sess, item.span) { + return; + } + if let hir::ImplItemKind::Method(ref sig, ..) 
= item.kind { + lint_for_missing_headers(cx, item.hir_id, item.span, sig, headers); + } + } +} + +fn lint_for_missing_headers<'a, 'tcx>( + cx: &LateContext<'a, 'tcx>, + hir_id: hir::HirId, + span: impl Into + Copy, + sig: &hir::FnSig<'_>, + headers: DocHeaders, +) { + if !cx.access_levels.is_exported(hir_id) { + return; // Private functions do not require doc comments + } + if !headers.safety && sig.header.unsafety == hir::Unsafety::Unsafe { + span_lint( + cx, + MISSING_SAFETY_DOC, + span, + "unsafe function's docs miss `# Safety` section", + ); + } + if !headers.errors { + if match_type(cx, return_ty(cx, hir_id), &paths::RESULT) { + span_lint( + cx, + MISSING_ERRORS_DOC, + span, + "docs for function returning `Result` missing `# Errors` section", + ); + } else { + let def_id = cx.tcx.hir().local_def_id(hir_id); + let mir = cx.tcx.optimized_mir(def_id); + if let Some(future) = get_trait_def_id(cx, &paths::FUTURE) { + if implements_trait(cx, mir.return_ty(), future, &[]) { + use TyKind::*; + + if let Opaque(_, subs) = mir.return_ty().kind { + if let Some(ty) = subs.types().next() { + if let Generator(_, subs, _) = ty.kind { + if match_type(cx, subs.as_generator().return_ty(def_id, cx.tcx), &paths::RESULT) { + span_lint( + cx, + MISSING_ERRORS_DOC, + span, + "docs for function returning `Result` missing `# Errors` section", + ); + } + } + } + } + } + } + } } } @@ -65,6 +256,7 @@ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &ast::Item) { /// need to keep track of /// the spans but this function is inspired from the later. #[allow(clippy::cast_possible_truncation)] +#[must_use] pub fn strip_doc_comment_decoration(comment: &str, span: Span) -> (String, Vec<(usize, Span)>) { // one-line comments lose their prefix const ONELINERS: &[&str] = &["///!", "///", "//!", "//"]; @@ -115,21 +307,29 @@ pub fn strip_doc_comment_decoration(comment: &str, span: Span) -> (String, Vec<( panic!("not a doc-comment: {}", comment); } -pub fn check_attrs<'a>(cx: &EarlyContext<'_>, valid_idents: &FxHashSet, attrs: &'a [ast::Attribute]) { +#[derive(Copy, Clone)] +struct DocHeaders { + safety: bool, + errors: bool, +} + +fn check_attrs<'a>(cx: &LateContext<'_, '_>, valid_idents: &FxHashSet, attrs: &'a [Attribute]) -> DocHeaders { let mut doc = String::new(); let mut spans = vec![]; for attr in attrs { - if attr.is_sugared_doc { - if let Some(ref current) = attr.value_str() { - let current = current.to_string(); - let (current, current_spans) = strip_doc_comment_decoration(¤t, attr.span); - spans.extend_from_slice(¤t_spans); - doc.push_str(¤t); - } + if let AttrKind::DocComment(ref comment) = attr.kind { + let comment = comment.to_string(); + let (comment, current_spans) = strip_doc_comment_decoration(&comment, attr.span); + spans.extend_from_slice(¤t_spans); + doc.push_str(&comment); } else if attr.check_name(sym!(doc)) { // ignore mix of sugared and non-sugared doc - return; + // don't trigger the safety or errors check + return DocHeaders { + safety: true, + errors: true, + }; } } @@ -140,39 +340,50 @@ pub fn check_attrs<'a>(cx: &EarlyContext<'_>, valid_idents: &FxHashSet, current += offset_copy; } - if !doc.is_empty() { - let parser = pulldown_cmark::Parser::new(&doc).into_offset_iter(); - // Iterate over all `Events` and combine consecutive events into one - let events = parser.coalesce(|previous, current| { - use pulldown_cmark::Event::*; - - let previous_range = previous.1; - let current_range = current.1; - - match (previous.0, current.0) { - (Text(previous), Text(current)) => { - let mut previous = 
previous.to_string(); - previous.push_str(¤t); - Ok((Text(previous.into()), previous_range)) - }, - (previous, current) => Err(((previous, previous_range), (current, current_range))), - } - }); - check_doc(cx, valid_idents, events, &spans); + if doc.is_empty() { + return DocHeaders { + safety: false, + errors: false, + }; } + + let parser = pulldown_cmark::Parser::new(&doc).into_offset_iter(); + // Iterate over all `Events` and combine consecutive events into one + let events = parser.coalesce(|previous, current| { + use pulldown_cmark::Event::*; + + let previous_range = previous.1; + let current_range = current.1; + + match (previous.0, current.0) { + (Text(previous), Text(current)) => { + let mut previous = previous.to_string(); + previous.push_str(¤t); + Ok((Text(previous.into()), previous_range)) + }, + (previous, current) => Err(((previous, previous_range), (current, current_range))), + } + }); + check_doc(cx, valid_idents, events, &spans) } fn check_doc<'a, Events: Iterator, Range)>>( - cx: &EarlyContext<'_>, + cx: &LateContext<'_, '_>, valid_idents: &FxHashSet, events: Events, spans: &[(usize, Span)], -) { +) -> DocHeaders { + // true if a safety header was found use pulldown_cmark::Event::*; use pulldown_cmark::Tag::*; + let mut headers = DocHeaders { + safety: false, + errors: false, + }; let mut in_code = false; let mut in_link = None; + let mut in_heading = false; for (event, range) in events { match event { @@ -180,9 +391,11 @@ fn check_doc<'a, Events: Iterator, Range in_code = false, Start(Link(_, url, _)) => in_link = Some(url), End(Link(..)) => in_link = None, - Start(_tag) | End(_tag) => (), // We don't care about other tags - Html(_html) | InlineHtml(_html) => (), // HTML is weird, just ignore it - SoftBreak | HardBreak | TaskListMarker(_) | Code(_) => (), + Start(Heading(_)) => in_heading = true, + End(Heading(_)) => in_heading = false, + Start(_tag) | End(_tag) => (), // We don't care about other tags + Html(_html) => (), // HTML is weird, just ignore it + SoftBreak | HardBreak | TaskListMarker(_) | Code(_) | Rule => (), FootnoteReference(text) | Text(text) => { if Some(&text) == in_link.as_ref() { // Probably a link of the form `` @@ -190,15 +403,16 @@ fn check_doc<'a, Events: Iterator, Range o, - Err(e) => e - 1, - }; - - let (begin, span) = spans[index]; - + headers.safety |= in_heading && text.trim() == "Safety"; + headers.errors |= in_heading && text.trim() == "Errors"; + let index = match spans.binary_search_by(|c| c.0.cmp(&range.start)) { + Ok(o) => o, + Err(e) => e - 1, + }; + let (begin, span) = spans[index]; + if in_code { + check_code(cx, &text, span); + } else { // Adjust for the beginning of the current `Event` let span = span.with_lo(span.lo() + BytePos::from_usize(range.start - begin)); @@ -207,9 +421,18 @@ fn check_doc<'a, Events: Iterator, Range, text: &str, span: Span) { + if text.contains("fn main() {") && !LEAVE_MAIN_PATTERNS.iter().any(|p| text.contains(p)) { + span_lint(cx, NEEDLESS_DOCTEST_MAIN, span, "needless `fn main` in doctest"); + } } -fn check_text(cx: &EarlyContext<'_>, valid_idents: &FxHashSet, text: &str, span: Span) { +fn check_text(cx: &LateContext<'_, '_>, valid_idents: &FxHashSet, text: &str, span: Span) { for word in text.split(|c: char| c.is_whitespace() || c == '\'') { // Trim punctuation as in `some comment (see foo::bar).` // ^^ @@ -232,7 +455,7 @@ fn check_text(cx: &EarlyContext<'_>, valid_idents: &FxHashSet, text: &st } } -fn check_word(cx: &EarlyContext<'_>, word: &str, span: Span) { +fn check_word(cx: &LateContext<'_, '_>, 
word: &str, span: Span) { /// Checks if a string is camel-case, i.e., contains at least two uppercase /// letters (`Clippy` is ok) and one lower-case letter (`NASA` is ok). /// Plurals are also excluded (`IDs` is ok).
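
For orientation, here is a minimal, consolidated sketch (not part of the patch) of the kinds of items the three new lints added by this diff target, reconstructed from the examples already given in the lint declarations above. The function bodies, the `_u` parameter name, and the `io::Result<String>` return type (the type parameter is stripped in the rendered diff) are illustrative assumptions, not code from the patch:

```rust
use std::io;

type Universe = ();

// MISSING_SAFETY_DOC: a `pub unsafe fn` whose docs lack a `# Safety` section is flagged;
// adding that section (as in the second example in the lint docs) silences the lint.
/// This function should really be documented
pub unsafe fn start_apocalypse(_u: &mut Universe) {
    unimplemented!();
}

// MISSING_ERRORS_DOC: a public function returning `Result` is expected to carry an
// `# Errors` section like this one; without it the lint fires.
/// # Errors
///
/// Will return `Err` if `filename` does not exist or the user does not have
/// permission to read it.
pub fn read(filename: String) -> io::Result<String> {
    std::fs::read_to_string(filename)
}

// NEEDLESS_DOCTEST_MAIN: the explicit `fn main()` wrapper in this doctest is
// unnecessary and gets flagged; the assertion could stand on its own.
/// # Examples
///
/// ```
/// fn main() {
///     // this needs not be in an `fn`
/// }
/// ```
pub fn needless_main() {
    unimplemented!();
}
```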