X-Git-Url: https://git.lizzy.rs/?a=blobdiff_plain;f=clippy_lints%2Fsrc%2Fdoc.rs;h=fb53b55ebd6acdd41e0d2a9f94676248bccf4343;hb=4f3b49fffa13518aa6006762c0eb6851c0c0b2d5;hp=6ce36fd2360e1e9569cb748e566a3b38ce57ef4a;hpb=9e73d3368041a74fac40328d91605364e0cedcd7;p=rust.git diff --git a/clippy_lints/src/doc.rs b/clippy_lints/src/doc.rs index 6ce36fd2360..fb53b55ebd6 100644 --- a/clippy_lints/src/doc.rs +++ b/clippy_lints/src/doc.rs @@ -1,16 +1,29 @@ -use crate::utils::{implements_trait, is_entrypoint_fn, is_type_diagnostic_item, return_ty, span_lint}; +use clippy_utils::diagnostics::{span_lint, span_lint_and_note}; +use clippy_utils::ty::{implements_trait, is_type_diagnostic_item}; +use clippy_utils::{is_entrypoint_fn, is_expn_of, match_panic_def_id, method_chain_args, return_ty}; use if_chain::if_chain; use itertools::Itertools; -use rustc_ast::ast::{AttrKind, Attribute}; +use rustc_ast::ast::{Async, AttrKind, Attribute, FnKind, FnRetTy, ItemKind}; use rustc_ast::token::CommentKind; use rustc_data_structures::fx::FxHashSet; +use rustc_data_structures::sync::Lrc; +use rustc_errors::emitter::EmitterWriter; +use rustc_errors::Handler; use rustc_hir as hir; +use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor}; +use rustc_hir::{AnonConst, Expr, ExprKind, QPath}; use rustc_lint::{LateContext, LateLintPass}; +use rustc_middle::hir::map::Map; use rustc_middle::lint::in_external_macro; use rustc_middle::ty; +use rustc_parse::maybe_new_parser_from_source_str; +use rustc_parse::parser::ForceCollect; +use rustc_session::parse::ParseSess; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::source_map::{BytePos, MultiSpan, Span}; -use rustc_span::Pos; +use rustc_span::edition::Edition; +use rustc_span::source_map::{BytePos, FilePathMapping, MultiSpan, SourceMap, Span}; +use rustc_span::{sym, FileName, Pos}; +use std::io; use std::ops::Range; use url::Url; @@ -26,6 +39,11 @@ /// **Known problems:** Lots of bad docs won’t be fixed, what the lint checks /// for is limited, and there are still false positives. /// + /// In addition, when writing documentation comments, including `[]` brackets + /// inside a link text would trip the parser. Therfore, documenting link with + /// `[`SmallVec<[T; INLINE_CAPACITY]>`]` and then [`SmallVec<[T; INLINE_CAPACITY]>`]: SmallVec + /// would fail. + /// /// **Examples:** /// ```rust /// /// Do something with the foo_bar parameter. See also @@ -33,6 +51,14 @@ /// // ^ `foo_bar` and `that::other::module::foo` should be ticked. /// fn doit(foo_bar: usize) {} /// ``` + /// + /// ```rust + /// // Link text with `[]` brackets should be written as following: + /// /// Consume the array and return the inner + /// /// [`SmallVec<[T; INLINE_CAPACITY]>`][SmallVec]. + /// /// [SmallVec]: SmallVec + /// fn main() {} + /// ``` pub DOC_MARKDOWN, pedantic, "presence of `_`, `::` or camel-case outside backticks in documentation" @@ -101,6 +127,37 @@ "`pub fn` returns `Result` without `# Errors` in doc comment" } +declare_clippy_lint! { + /// **What it does:** Checks the doc comments of publicly visible functions that + /// may panic and warns if there is no `# Panics` section. + /// + /// **Why is this bad?** Documenting the scenarios in which panicking occurs + /// can help callers who do not want to panic to avoid those situations. + /// + /// **Known problems:** None. 
+ /// + /// **Examples:** + /// + /// Since the following function may panic it has a `# Panics` section in + /// its doc comment: + /// + /// ```rust + /// /// # Panics + /// /// + /// /// Will panic if y is 0 + /// pub fn divide_by(x: i32, y: i32) -> i32 { + /// if y == 0 { + /// panic!("Cannot divide by 0") + /// } else { + /// x / y + /// } + /// } + /// ``` + pub MISSING_PANICS_DOC, + pedantic, + "`pub fn` may panic without `# Panics` in doc comment" +} + declare_clippy_lint! { /// **What it does:** Checks for `fn main() { .. }` in doctests /// @@ -145,28 +202,42 @@ pub fn new(valid_idents: FxHashSet) -> Self { } } -impl_lint_pass!(DocMarkdown => [DOC_MARKDOWN, MISSING_SAFETY_DOC, MISSING_ERRORS_DOC, NEEDLESS_DOCTEST_MAIN]); +impl_lint_pass!(DocMarkdown => + [DOC_MARKDOWN, MISSING_SAFETY_DOC, MISSING_ERRORS_DOC, MISSING_PANICS_DOC, NEEDLESS_DOCTEST_MAIN] +); impl<'tcx> LateLintPass<'tcx> for DocMarkdown { - fn check_crate(&mut self, cx: &LateContext<'tcx>, krate: &'tcx hir::Crate<'_>) { - check_attrs(cx, &self.valid_idents, &krate.item.attrs); + fn check_crate(&mut self, cx: &LateContext<'tcx>, _: &'tcx hir::Crate<'_>) { + let attrs = cx.tcx.hir().attrs(hir::CRATE_HIR_ID); + check_attrs(cx, &self.valid_idents, attrs); } fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) { - let headers = check_attrs(cx, &self.valid_idents, &item.attrs); + let attrs = cx.tcx.hir().attrs(item.hir_id()); + let headers = check_attrs(cx, &self.valid_idents, attrs); match item.kind { hir::ItemKind::Fn(ref sig, _, body_id) => { - if !(is_entrypoint_fn(cx, cx.tcx.hir().local_def_id(item.hir_id).to_def_id()) - || in_external_macro(cx.tcx.sess, item.span)) - { - lint_for_missing_headers(cx, item.hir_id, item.span, sig, headers, Some(body_id)); + if !(is_entrypoint_fn(cx, item.def_id.to_def_id()) || in_external_macro(cx.tcx.sess, item.span)) { + let body = cx.tcx.hir().body(body_id); + let mut fpu = FindPanicUnwrap { + cx, + typeck_results: cx.tcx.typeck(item.def_id), + panic_span: None, + }; + fpu.visit_expr(&body.value); + lint_for_missing_headers( + cx, + item.hir_id(), + item.span, + sig, + headers, + Some(body_id), + fpu.panic_span, + ); } }, - hir::ItemKind::Impl { - of_trait: ref trait_ref, - .. - } => { - self.in_trait_impl = trait_ref.is_some(); + hir::ItemKind::Impl(ref impl_) => { + self.in_trait_impl = impl_.of_trait.is_some(); }, _ => {}, } @@ -179,21 +250,38 @@ fn check_item_post(&mut self, _cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_> } fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) { - let headers = check_attrs(cx, &self.valid_idents, &item.attrs); + let attrs = cx.tcx.hir().attrs(item.hir_id()); + let headers = check_attrs(cx, &self.valid_idents, attrs); if let hir::TraitItemKind::Fn(ref sig, ..) 
= item.kind { if !in_external_macro(cx.tcx.sess, item.span) { - lint_for_missing_headers(cx, item.hir_id, item.span, sig, headers, None); + lint_for_missing_headers(cx, item.hir_id(), item.span, sig, headers, None, None); } } } fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) { - let headers = check_attrs(cx, &self.valid_idents, &item.attrs); + let attrs = cx.tcx.hir().attrs(item.hir_id()); + let headers = check_attrs(cx, &self.valid_idents, attrs); if self.in_trait_impl || in_external_macro(cx.tcx.sess, item.span) { return; } if let hir::ImplItemKind::Fn(ref sig, body_id) = item.kind { - lint_for_missing_headers(cx, item.hir_id, item.span, sig, headers, Some(body_id)); + let body = cx.tcx.hir().body(body_id); + let mut fpu = FindPanicUnwrap { + cx, + typeck_results: cx.tcx.typeck(item.def_id), + panic_span: None, + }; + fpu.visit_expr(&body.value); + lint_for_missing_headers( + cx, + item.hir_id(), + item.span, + sig, + headers, + Some(body_id), + fpu.panic_span, + ); } } } @@ -205,6 +293,7 @@ fn lint_for_missing_headers<'tcx>( sig: &hir::FnSig<'_>, headers: DocHeaders, body_id: Option, + panic_span: Option, ) { if !cx.access_levels.is_exported(hir_id) { return; // Private functions do not require doc comments @@ -217,8 +306,18 @@ fn lint_for_missing_headers<'tcx>( "unsafe function's docs miss `# Safety` section", ); } + if !headers.panics && panic_span.is_some() { + span_lint_and_note( + cx, + MISSING_PANICS_DOC, + span, + "docs for function which may panic missing `# Panics` section", + panic_span, + "first possible panic found here", + ); + } if !headers.errors { - if is_type_diagnostic_item(cx, return_ty(cx, hir_id), sym!(result_type)) { + if is_type_diagnostic_item(cx, return_ty(cx, hir_id), sym::result_type) { span_lint( cx, MISSING_ERRORS_DOC, @@ -229,14 +328,14 @@ fn lint_for_missing_headers<'tcx>( if_chain! 
{ if let Some(body_id) = body_id; if let Some(future) = cx.tcx.lang_items().future_trait(); - let def_id = cx.tcx.hir().body_owner_def_id(body_id); - let mir = cx.tcx.optimized_mir(def_id.to_def_id()); - let ret_ty = mir.return_ty(); + let typeck = cx.tcx.typeck_body(body_id); + let body = cx.tcx.hir().body(body_id); + let ret_ty = typeck.expr_ty(&body.value); if implements_trait(cx, ret_ty, future, &[]); - if let ty::Opaque(_, subs) = ret_ty.kind; + if let ty::Opaque(_, subs) = ret_ty.kind(); if let Some(gen) = subs.types().next(); - if let ty::Generator(_, subs, _) = gen.kind; - if is_type_diagnostic_item(cx, subs.as_generator().return_ty(), sym!(result_type)); + if let ty::Generator(_, subs, _) = gen.kind(); + if is_type_diagnostic_item(cx, subs.as_generator().return_ty(), sym::result_type); then { span_lint( cx, @@ -303,6 +402,7 @@ pub fn strip_doc_comment_decoration(doc: &str, comment_kind: CommentKind, span: struct DocHeaders { safety: bool, errors: bool, + panics: bool, } fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet, attrs: &'a [Attribute]) -> DocHeaders { @@ -314,12 +414,13 @@ fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet, attrs let (comment, current_spans) = strip_doc_comment_decoration(&comment.as_str(), comment_kind, attr.span); spans.extend_from_slice(¤t_spans); doc.push_str(&comment); - } else if attr.has_name(sym!(doc)) { + } else if attr.has_name(sym::doc) { // ignore mix of sugared and non-sugared doc // don't trigger the safety or errors check return DocHeaders { safety: true, errors: true, + panics: true, }; } } @@ -335,6 +436,7 @@ fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet, attrs return DocHeaders { safety: false, errors: false, + panics: false, }; } @@ -358,7 +460,7 @@ fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet, attrs check_doc(cx, valid_idents, events, &spans) } -const RUST_CODE: &[&str] = &["rust", "no_run", "should_panic", "compile_fail", "edition2018"]; +const RUST_CODE: &[&str] = &["rust", "no_run", "should_panic", "compile_fail"]; fn check_doc<'a, Events: Iterator, Range)>>( cx: &LateContext<'_>, @@ -376,18 +478,30 @@ fn check_doc<'a, Events: Iterator, Range { in_code = true; if let CodeBlockKind::Fenced(lang) = kind { - is_rust = - lang.is_empty() || !lang.contains("ignore") && lang.split(',').any(|i| RUST_CODE.contains(&i)); + for item in lang.split(',') { + if item == "ignore" { + is_rust = false; + break; + } + if let Some(stripped) = item.strip_prefix("edition") { + is_rust = true; + edition = stripped.parse::().ok(); + } else if item.is_empty() || RUST_CODE.contains(&item) { + is_rust = true; + } + } } }, End(CodeBlock(_)) => { @@ -410,6 +524,7 @@ fn check_doc<'a, Events: Iterator, Range o, Err(e) => e - 1, @@ -417,7 +532,8 @@ fn check_doc<'a, Events: Iterator, Range, Range, text: &str, edition: Edition, span: Span) { + fn has_needless_main(code: &str, edition: Edition) -> bool { + rustc_driver::catch_fatal_errors(|| { + rustc_span::with_session_globals(edition, || { + let filename = FileName::anon_source_code(code); + + let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); + let emitter = EmitterWriter::new(box io::sink(), None, false, false, false, None, false); + let handler = Handler::with_emitter(false, None, box emitter); + let sess = ParseSess::with_span_handler(handler, sm); + + let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code.into()) { + Ok(p) => p, + Err(errs) => { + for mut err in errs { + err.cancel(); + } + return false; + 
}, + }; + + let mut relevant_main_found = false; + loop { + match parser.parse_item(ForceCollect::No) { + Ok(Some(item)) => match &item.kind { + // Tests with one of these items are ignored + ItemKind::Static(..) + | ItemKind::Const(..) + | ItemKind::ExternCrate(..) + | ItemKind::ForeignMod(..) => return false, + // We found a main function ... + ItemKind::Fn(box FnKind(_, sig, _, Some(block))) if item.ident.name == sym::main => { + let is_async = matches!(sig.header.asyncness, Async::Yes { .. }); + let returns_nothing = match &sig.decl.output { + FnRetTy::Default(..) => true, + FnRetTy::Ty(ty) if ty.kind.is_unit() => true, + FnRetTy::Ty(_) => false, + }; + + if returns_nothing && !is_async && !block.stmts.is_empty() { + // This main function should be linted, but only if there are no other functions + relevant_main_found = true; + } else { + // This main function should not be linted, we're done + return false; + } + }, + // Another function was found; this case is ignored too + ItemKind::Fn(..) => return false, + _ => {}, + }, + Ok(None) => break, + Err(mut e) => { + e.cancel(); + return false; + }, + } + } + + relevant_main_found + }) + }) + .ok() + .unwrap_or_default() + } -fn check_code(cx: &LateContext<'_>, text: &str, span: Span) { - if text.contains("fn main() {") && !LEAVE_MAIN_PATTERNS.iter().any(|p| text.contains(p)) { + if has_needless_main(text, edition) { span_lint(cx, NEEDLESS_DOCTEST_MAIN, span, "needless `fn main` in doctest"); } } @@ -471,7 +650,7 @@ fn is_camel_case(s: &str) -> bool { return false; } - let s = if s.ends_with('s') { &s[..s.len() - 1] } else { s }; + let s = s.strip_suffix('s').unwrap_or(s); s.chars().all(char::is_alphanumeric) && s.chars().filter(|&c| c.is_uppercase()).take(2).count() > 1 @@ -514,3 +693,62 @@ fn has_hyphen(s: &str) -> bool { ); } } + +struct FindPanicUnwrap<'a, 'tcx> { + cx: &'a LateContext<'tcx>, + panic_span: Option, + typeck_results: &'tcx ty::TypeckResults<'tcx>, +} + +impl<'a, 'tcx> Visitor<'tcx> for FindPanicUnwrap<'a, 'tcx> { + type Map = Map<'tcx>; + + fn visit_expr(&mut self, expr: &'tcx Expr<'_>) { + if self.panic_span.is_some() { + return; + } + + // check for `begin_panic` + if_chain! { + if let ExprKind::Call(func_expr, _) = expr.kind; + if let ExprKind::Path(QPath::Resolved(_, path)) = func_expr.kind; + if let Some(path_def_id) = path.res.opt_def_id(); + if match_panic_def_id(self.cx, path_def_id); + if is_expn_of(expr.span, "unreachable").is_none(); + if !is_expn_of_debug_assertions(expr.span); + then { + self.panic_span = Some(expr.span); + } + } + + // check for `assert_eq` or `assert_ne` + if is_expn_of(expr.span, "assert_eq").is_some() || is_expn_of(expr.span, "assert_ne").is_some() { + self.panic_span = Some(expr.span); + } + + // check for `unwrap` + if let Some(arglists) = method_chain_args(expr, &["unwrap"]) { + let reciever_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs(); + if is_type_diagnostic_item(self.cx, reciever_ty, sym::option_type) + || is_type_diagnostic_item(self.cx, reciever_ty, sym::result_type) + { + self.panic_span = Some(expr.span); + } + } + + // and check sub-expressions + intravisit::walk_expr(self, expr); + } + + // Panics in const blocks will cause compilation to fail. 
+    fn visit_anon_const(&mut self, _: &'tcx AnonConst) {}
+
+    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
+        NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
+    }
+}
+
+fn is_expn_of_debug_assertions(span: Span) -> bool {
+    const MACRO_NAMES: &[&str] = &["debug_assert", "debug_assert_eq", "debug_assert_ne"];
+    MACRO_NAMES.iter().any(|name| is_expn_of(span, name).is_some())
+}
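
To illustrate the new `MISSING_PANICS_DOC` lint wired up above, here is a minimal sketch of downstream code it would react to. The function names are invented for the example. Per `FindPanicUnwrap`, the lint fires on exported functions whose bodies reach `panic!`, `assert_eq!`/`assert_ne!`, or `.unwrap()` on an `Option`/`Result`, unless the doc comment already carries a `# Panics` header or the panic comes from a `debug_assert*`/`unreachable!` expansion.

```rust
/// Returns the first element of `values`.
///
/// No `# Panics` section, and `.unwrap()` is called on an `Option`,
/// so `MISSING_PANICS_DOC` warns and points at the `unwrap` call.
pub fn first(values: &[i32]) -> i32 {
    *values.first().unwrap()
}

/// Returns the first element of `values`.
///
/// # Panics
///
/// Panics if `values` is empty.
pub fn first_or_panic(values: &[i32]) -> i32 {
    // Documented, so no warning is emitted here.
    *values.first().unwrap()
}

/// `debug_assert!` expansions are skipped via `is_expn_of_debug_assertions`,
/// so this function needs no `# Panics` section.
pub fn non_negative(x: i32) -> i32 {
    debug_assert!(x >= 0);
    x
}
```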
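
The rewritten `NEEDLESS_DOCTEST_MAIN` check no longer matches on the literal string `fn main() {`; `has_needless_main` parses the doctest with rustc's own parser and warns only for a synchronous `main` that returns nothing, has a non-empty body, and is not accompanied by other items. The snippets below are a rough, invented sketch of doc comments the check would and would not report; they are shown only to mark which fenced blocks trigger the lint.

```rust
/// Both doctests here are reported: `main` is synchronous, returns nothing,
/// has a non-empty body, and no other item appears in the snippet. The
/// `edition2018` fence is now honoured, so the `async` block parses cleanly.
///
/// ```
/// fn main() {
///     let answer = 40 + 2;
///     assert_eq!(answer, 42);
/// }
/// ```
///
/// ```edition2018
/// fn main() {
///     let _fut = async { 42 };
/// }
/// ```
pub fn reported() {}

/// These are left alone by the parser-based check:
///
/// ```
/// // an async `main` is skipped
/// async fn main() { println!("hello"); }
/// ```
///
/// ```
/// // a second function means the `main` wrapper is doing real work
/// fn helper() -> i32 { 1 }
/// fn main() { assert_eq!(helper(), 1); }
/// ```
///
/// ```ignore
/// fn main() { this block is never parsed }
/// ```
pub fn not_reported() {}
```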
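
The `# Errors` check used to recover the future's output type from the optimized MIR of the body; the patch switches it to `typeck_body`, looking through the `ty::Opaque` return type to the generator's return type. A small invented example of what `MISSING_ERRORS_DOC` therefore still expects for an `async fn`:

```rust
use std::io;

/// Reads the whole configuration file.
///
/// The desugared return type is an opaque `impl Future`; the lint resolves
/// the generator's return type to `Result`, so the missing `# Errors`
/// section is still reported for this `async fn`.
pub async fn load_config(path: &str) -> Result<String, io::Error> {
    std::fs::read_to_string(path)
}

/// Reads the whole configuration file.
///
/// # Errors
///
/// Fails if the file does not exist or cannot be read.
pub async fn load_config_documented(path: &str) -> Result<String, io::Error> {
    // Documented, so no warning is emitted here.
    std::fs::read_to_string(path)
}
```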