1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Script to check the validity of `href` links in our HTML documentation.
13 //! In the past we've been quite error prone to writing in broken links as most
14 //! of them are added manually rather than automatically. As files move over
15 //! time or APIs change, old links become stale or broken. The purpose of this
16 //! script is to check all relative links in our documentation to make sure they
17 //! actually point to a valid place.
19 //! Currently this doesn't actually do any HTML parsing or anything fancy like
20 //! that, it just has a simple "regex" to search for `href` and `id` tags.
21 //! These values are then translated to file URLs if possible and then the
22 //! destination is asserted to exist.
24 //! A few whitelisted exceptions are allowed as there are known bugs in rustdoc,
25 //! but this should catch the majority of "broken link" cases.
31 use std::io::prelude::*;
32 use std::path::{Path, PathBuf};
33 use std::collections::{HashMap, HashSet};
34 use std::collections::hash_map::Entry;
36 use url::{Url, UrlParser};
41 ($e:expr) => (match $e {
43 Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
48 let docs = env::args().nth(1).unwrap();
49 let docs = env::current_dir().unwrap().join(docs);
50 let mut url = Url::from_file_path(&docs).unwrap();
51 let mut errors = false;
52 walk(&mut HashMap::new(), &docs, &docs, &mut url, &mut errors);
54 panic!("found some broken links");
60 IOError(std::io::Error),
61 BrokenRedirect(PathBuf, std::io::Error),
75 type Cache = HashMap<PathBuf, FileEntry>;
78 fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {
79 if self.ids.is_empty() {
80 with_attrs_in_source(contents, " id", |fragment, i| {
81 let frag = fragment.trim_left_matches("#").to_owned();
82 if !self.ids.insert(frag) {
84 println!("{}:{}: id is not unique: `{}`", file.display(), i, fragment);
91 fn walk(cache: &mut Cache, root: &Path, dir: &Path, url: &mut Url, errors: &mut bool) {
92 for entry in t!(dir.read_dir()).map(|e| t!(e)) {
93 let path = entry.path();
94 let kind = t!(entry.file_type());
95 url.path_mut().unwrap().push(entry.file_name().into_string().unwrap());
97 walk(cache, root, &path, url, errors);
99 let pretty_path = check(cache, root, &path, url, errors);
100 if let Some(pretty_path) = pretty_path {
101 let entry = cache.get_mut(&pretty_path).unwrap();
102 // we don't need the source anymore,
103 // so drop to reduce memory-usage
104 entry.source = String::new();
107 url.path_mut().unwrap().pop();
111 fn check(cache: &mut Cache,
117 // ignore js files as they are not prone to errors as the rest of the
118 // documentation is and they otherwise bring up false positives.
119 if file.extension().and_then(|s| s.to_str()) == Some("js") {
123 // Unfortunately we're not 100% full of valid links today to we need a few
124 // whitelists to get this past `make check` today.
126 if file.ends_with("std/string/struct.String.html") {
130 if file.ends_with("collections/string/struct.String.html") {
134 if file.ends_with("btree_set/struct.BTreeSet.html") ||
135 file.ends_with("collections/struct.BTreeSet.html") ||
136 file.ends_with("collections/btree_map/struct.BTreeMap.html") ||
137 file.ends_with("collections/hash_map/struct.HashMap.html") {
141 let mut parser = UrlParser::new();
142 parser.base_url(base);
144 let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);
145 let (pretty_file, contents) = match res {
147 Err(_) => return None,
150 cache.get_mut(&pretty_file)
152 .parse_ids(&pretty_file, &contents, errors);
155 // Search for anything that's the regex 'href[ ]*=[ ]*".*?"'
156 with_attrs_in_source(&contents, " href", |url, i| {
157 // Ignore external URLs
158 if url.starts_with("http:") || url.starts_with("https:") ||
159 url.starts_with("javascript:") || url.starts_with("ftp:") ||
160 url.starts_with("irc:") || url.starts_with("data:") {
163 // Once we've plucked out the URL, parse it using our base url and
164 // then try to extract a file path.
165 let (parsed_url, path) = match url_to_file_path(&parser, url) {
166 Some((url, path)) => (url, PathBuf::from(path)),
169 println!("{}:{}: invalid link - {}",
170 pretty_file.display(),
177 // Alright, if we've found a file name then this file had better
178 // exist! If it doesn't then we register and print an error.
181 // Links to directories show as directory listings when viewing
182 // the docs offline so it's best to avoid them.
184 let pretty_path = path.strip_prefix(root).unwrap_or(&path);
185 println!("{}:{}: directory link - {}",
186 pretty_file.display(),
188 pretty_path.display());
191 let res = load_file(cache, root, path.clone(), FromRedirect(false));
192 let (pretty_path, contents) = match res {
194 Err(LoadError::IOError(err)) => panic!(format!("{}", err)),
195 Err(LoadError::BrokenRedirect(target, _)) => {
197 println!("{}:{}: broken redirect to {}",
198 pretty_file.display(),
203 Err(LoadError::IsRedirect) => unreachable!(),
206 if let Some(ref fragment) = parsed_url.fragment {
207 // Fragments like `#1-6` are most likely line numbers to be
208 // interpreted by javascript, so we're ignoring these
209 if fragment.splitn(2, '-')
210 .all(|f| f.chars().all(|c| c.is_numeric())) {
214 let entry = &mut cache.get_mut(&pretty_path).unwrap();
215 entry.parse_ids(&pretty_path, &contents, errors);
217 if !entry.ids.contains(fragment) {
219 print!("{}:{}: broken link fragment ",
220 pretty_file.display(),
222 println!("`#{}` pointing to `{}`", fragment, pretty_path.display());
227 print!("{}:{}: broken link - ", pretty_file.display(), i + 1);
228 let pretty_path = path.strip_prefix(root).unwrap_or(&path);
229 println!("{}", pretty_path.display());
235 fn load_file(cache: &mut Cache,
239 -> Result<(PathBuf, String), LoadError> {
240 let mut contents = String::new();
241 let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));
243 let maybe_redirect = match cache.entry(pretty_file.clone()) {
244 Entry::Occupied(entry) => {
245 contents = entry.get().source.clone();
248 Entry::Vacant(entry) => {
249 let mut fp = try!(File::open(file.clone()).map_err(|err| {
250 if let FromRedirect(true) = redirect {
251 LoadError::BrokenRedirect(file.clone(), err)
253 LoadError::IOError(err)
256 try!(fp.read_to_string(&mut contents)
257 .map_err(|err| LoadError::IOError(err)));
259 let maybe = maybe_redirect(&contents);
261 if let SkipRedirect = redirect {
262 return Err(LoadError::IsRedirect);
265 entry.insert(FileEntry {
266 source: contents.clone(),
273 let base = Url::from_file_path(&file).unwrap();
274 let mut parser = UrlParser::new();
275 parser.base_url(&base);
277 match maybe_redirect.and_then(|url| url_to_file_path(&parser, &url)) {
278 Some((_, redirect_file)) => {
279 let path = PathBuf::from(redirect_file);
280 load_file(cache, root, path, FromRedirect(true))
282 None => Ok((pretty_file, contents)),
286 fn maybe_redirect(source: &str) -> Option<String> {
287 const REDIRECT: &'static str = "<p>Redirecting to <a href=";
289 let mut lines = source.lines();
290 let redirect_line = match lines.nth(6) {
295 redirect_line.find(REDIRECT).map(|i| {
296 let rest = &redirect_line[(i + REDIRECT.len() + 1)..];
297 let pos_quote = rest.find('"').unwrap();
298 rest[..pos_quote].to_owned()
302 fn url_to_file_path(parser: &UrlParser, url: &str) -> Option<(Url, PathBuf)> {
305 .and_then(|parsed_url| parsed_url.to_file_path().ok().map(|f| (parsed_url, f)))
308 fn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str, attr: &str, mut f: F) {
309 for (i, mut line) in contents.lines().enumerate() {
310 while let Some(j) = line.find(attr) {
311 let rest = &line[j + attr.len()..];
313 let pos_equals = match rest.find("=") {
317 if rest[..pos_equals].trim_left_matches(" ") != "" {
321 let rest = &rest[pos_equals + 1..];
323 let pos_quote = match rest.find(&['"', '\''][..]) {
327 let quote_delim = rest.as_bytes()[pos_quote] as char;
329 if rest[..pos_quote].trim_left_matches(" ") != "" {
332 let rest = &rest[pos_quote + 1..];
333 let url = match rest.find(quote_delim) {
334 Some(i) => &rest[..i],