1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Script to check the validity of `href` links in our HTML documentation.
13 //! In the past we've been quite error prone to writing in broken links as most
14 //! of them are manually rather than automatically added. As files move over
15 //! time or apis change old links become stale or broken. The purpose of this
16 //! script is to check all relative links in our documentation to make sure they
17 //! actually point to a valid place.
19 //! Currently this doesn't actually do any HTML parsing or anything fancy like
20 //! that, it just has a simple "regex" to search for `href` and `id` tags.
21 //! These values are then translated to file URLs if possible and then the
22 //! destination is asserted to exist.
24 //! A few whitelisted exceptions are allowed as there are known bugs in rustdoc,
25 //! but this should catch the majority of "broken link" cases.
31 use std::io::prelude::*;
32 use std::path::{Path, PathBuf};
33 use std::collections::{HashMap, HashSet};
34 use std::collections::hash_map::Entry;
// Fragment of the `t!` ("try") convenience macro — its `macro_rules! t`
// header and the `Ok(e) => e` arm are elided in this excerpt. Evaluates the
// expression and panics with the stringified source text on `Err`, so I/O
// failures during the walk abort with a useful message.
41 ($e:expr) => (match $e {
43 Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
// Body fragment of `main` (the `fn main()` line is elided in this excerpt).
// Takes the docs directory as the first CLI argument and resolves it against
// the current working directory so `Url::from_file_path` gets an absolute path.
48 let docs = env::args().nth(1).unwrap();
49 let docs = env::current_dir().unwrap().join(docs);
// Base `file://` URL for the docs root; mutated in place as `walk` descends.
50 let mut url = Url::from_file_path(&docs).unwrap();
// Flag set (by reference) whenever any broken link/id is reported.
51 let mut errors = false;
52 walk(&mut HashMap::new(), &docs, &docs, &mut url, &mut errors);
// NOTE(review): the `if errors` guard around this panic is elided here —
// presumably the panic only fires when errors were reported above.
54 panic!("found some broken links");
// Variants of the `LoadError` enum (the `enum LoadError {` header and any
// other variants, e.g. `IsRedirect` used below, are elided in this excerpt).
// Generic failure reading a file (open or read_to_string).
60 IOError(std::io::Error),
// A redirect pointed at `PathBuf` but opening that target failed — see the
// `FromRedirect(true)` branch in `load_file`.
61 BrokenRedirect(PathBuf, std::io::Error),
// Per-run cache of loaded files, keyed by the "pretty" (root-relative) path
// produced in `load_file`, so each HTML file is read and id-parsed once.
75 type Cache = HashMap<PathBuf, FileEntry>;
// Method fragment (its enclosing `impl FileEntry` block is elided in this
// excerpt). Lazily scans `contents` for ` id` attributes the first time it is
// called — `self.ids` empty means we haven't parsed this file yet — and
// records each id fragment, reporting duplicates.
78 fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {
79 if self.ids.is_empty() {
80 with_attrs_in_source(contents, " id", |fragment, i| {
// Strip a leading '#' so stored ids match the form used in href fragments.
81 let frag = fragment.trim_left_matches("#").to_owned();
// `HashSet::insert` returns false when the id was already present.
82 if !self.ids.insert(frag) {
// NOTE(review): the `*errors = true` line appears to be elided here (original
// line 83) — duplicate ids are presumably also counted as errors.
84 println!("{}:{}: id is not unique: `{}`", file.display(), i, fragment);
// Recursively walks `dir`, checking every file under it. `url` mirrors the
// filesystem position: a segment is pushed before descending/checking and
// popped afterwards, so `check` always sees the correct base URL.
// (The `if kind.is_dir() { ... } else { ... }` lines around the recursive
// call vs. `check` call are elided in this excerpt.)
91 fn walk(cache: &mut Cache, root: &Path, dir: &Path, url: &mut Url, errors: &mut bool) {
92 for entry in t!(dir.read_dir()).map(|e| t!(e)) {
93 let path = entry.path();
94 let kind = t!(entry.file_type());
95 url.path_segments_mut().unwrap().push(entry.file_name().to_str().unwrap());
97 walk(cache, root, &path, url, errors);
99 let pretty_path = check(cache, root, &path, url, errors);
100 if let Some(pretty_path) = pretty_path {
101 let entry = cache.get_mut(&pretty_path).unwrap();
102 // we don't need the source anymore,
103 // so drop to reduce memory-usage
104 entry.source = String::new();
// Undo the segment pushed above so the URL stays in sync with the walk.
107 url.path_segments_mut().unwrap().pop();
// Checks a single documentation file: loads it, parses its ids, then verifies
// every ` href` attribute resolves to an existing file (and, when a fragment
// is present, to an existing id in the target file). Returns the pretty path
// on success so `walk` can drop the cached source. (Several parameter lines,
// early `return None`s after the whitelist checks, and some closing braces
// are elided in this excerpt.)
111 fn check(cache: &mut Cache,
117 // ignore js files as they are not prone to errors as the rest of the
118 // documentation is and they otherwise bring up false positives.
119 if file.extension().and_then(|s| s.to_str()) == Some("js") {
123 // Unfortunately we're not 100% full of valid links today so we need a few
124 // whitelists to get this past `make check` today.
126 if file.ends_with("std/string/struct.String.html") {
130 if file.ends_with("collections/string/struct.String.html") {
134 if file.ends_with("btree_set/struct.BTreeSet.html") ||
135 file.ends_with("collections/struct.BTreeSet.html") ||
136 file.ends_with("collections/btree_map/struct.BTreeMap.html") ||
137 file.ends_with("collections/hash_map/struct.HashMap.html") {
// SkipRedirect: if this file is itself a redirect stub, bail out (load_file
// returns Err(IsRedirect)) — redirect sources don't need link checking.
141 let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);
142 let (pretty_file, contents) = match res {
144 Err(_) => return None,
// Ensure this file's own ids are parsed before we start resolving links.
147 cache.get_mut(&pretty_file)
149 .parse_ids(&pretty_file, &contents, errors);
152 // Search for anything that's the regex 'href[ ]*=[ ]*".*?"'
153 with_attrs_in_source(&contents, " href", |url, i| {
154 // Ignore external URLs
155 if url.starts_with("http:") || url.starts_with("https:") ||
156 url.starts_with("javascript:") || url.starts_with("ftp:") ||
157 url.starts_with("irc:") || url.starts_with("data:") {
160 // Once we've plucked out the URL, parse it using our base url and
161 // then try to extract a file path.
162 let (parsed_url, path) = match url_to_file_path(&base, url) {
163 Some((url, path)) => (url, PathBuf::from(path)),
166 println!("{}:{}: invalid link - {}",
167 pretty_file.display(),
174 // Alright, if we've found a file name then this file had better
175 // exist! If it doesn't then we register and print an error.
178 // Links to directories show as directory listings when viewing
179 // the docs offline so it's best to avoid them.
// Report paths relative to the docs root when possible, for readable output.
181 let pretty_path = path.strip_prefix(root).unwrap_or(&path);
182 println!("{}:{}: directory link - {}",
183 pretty_file.display(),
185 pretty_path.display());
// FromRedirect(false): follow redirect stubs in the *target* file; a broken
// redirect there is reported rather than treated as a plain I/O error.
188 let res = load_file(cache, root, path.clone(), FromRedirect(false));
189 let (pretty_path, contents) = match res {
// NOTE(review): `panic!(format!(...))` is non-idiomatic — `panic!("{}", err)`
// avoids the intermediate String (clippy would flag this).
191 Err(LoadError::IOError(err)) => panic!(format!("{}", err)),
192 Err(LoadError::BrokenRedirect(target, _)) => {
194 println!("{}:{}: broken redirect to {}",
195 pretty_file.display(),
// We passed FromRedirect (not SkipRedirect) above, so IsRedirect can't occur.
200 Err(LoadError::IsRedirect) => unreachable!(),
203 if let Some(ref fragment) = parsed_url.fragment() {
204 // Fragments like `#1-6` are most likely line numbers to be
205 // interpreted by javascript, so we're ignoring these
206 if fragment.splitn(2, '-')
207 .all(|f| f.chars().all(|c| c.is_numeric())) {
// Parse the target file's ids (lazy — no-op if already parsed) and verify
// the fragment names one of them.
211 let entry = &mut cache.get_mut(&pretty_path).unwrap();
212 entry.parse_ids(&pretty_path, &contents, errors);
214 if !entry.ids.contains(*fragment) {
216 print!("{}:{}: broken link fragment ",
217 pretty_file.display(),
219 println!("`#{}` pointing to `{}`", fragment, pretty_path.display());
// Fallthrough case: the linked path does not exist on disk.
224 print!("{}:{}: broken link - ", pretty_file.display(), i + 1);
225 let pretty_path = path.strip_prefix(root).unwrap_or(&path);
226 println!("{}", pretty_path.display());
// Loads `file` (through the cache when possible), returning its pretty
// (root-relative) path and contents. Redirect stubs are handled according to
// the `redirect` policy: SkipRedirect errors out with IsRedirect, while
// FromRedirect follows the redirect target recursively. (The `file` and
// `redirect` parameter lines, some match arms, and closing braces are elided
// in this excerpt.)
232 fn load_file(cache: &mut Cache,
236 -> Result<(PathBuf, String), LoadError> {
237 let mut contents = String::new();
// Cache key: path relative to the docs root when the file is under it.
238 let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));
240 let maybe_redirect = match cache.entry(pretty_file.clone()) {
// Cache hit: reuse the stored source (the arm's redirect value is elided).
241 Entry::Occupied(entry) => {
242 contents = entry.get().source.clone();
// Cache miss: read the file from disk and remember it.
245 Entry::Vacant(entry) => {
246 let mut fp = File::open(file.clone()).map_err(|err| {
// A failed open while following a redirect means the redirect is broken;
// report it with the target path rather than as a generic I/O error.
247 if let FromRedirect(true) = redirect {
248 LoadError::BrokenRedirect(file.clone(), err)
250 LoadError::IOError(err)
253 fp.read_to_string(&mut contents).map_err(|err| LoadError::IOError(err))?;
255 let maybe = maybe_redirect(&contents);
// Caller asked not to follow redirects: a redirect stub is an error here.
257 if let SkipRedirect = redirect {
258 return Err(LoadError::IsRedirect);
// Store the source so later fragment checks don't re-read the file.
// (Remaining FileEntry fields are elided in this excerpt.)
261 entry.insert(FileEntry {
262 source: contents.clone(),
// Resolve a relative redirect target against this file's own URL, then load
// the target, marking the load as coming from a redirect.
269 let base = Url::from_file_path(&file).unwrap();
271 match maybe_redirect.and_then(|url| url_to_file_path(&base, &url)) {
272 Some((_, redirect_file)) => {
273 let path = PathBuf::from(redirect_file);
274 load_file(cache, root, path, FromRedirect(true))
276 None => Ok((pretty_file, contents)),
// Detects a rustdoc redirect stub by looking for the "Redirecting to" marker
// on a fixed line of the page, returning the quoted redirect target URL if
// present. (The match arms of `lines.nth(6)` are elided in this excerpt.)
280 fn maybe_redirect(source: &str) -> Option<String> {
281 const REDIRECT: &'static str = "<p>Redirecting to <a href=";
283 let mut lines = source.lines();
// Redirect pages place the marker on the 7th line (index 6) of the HTML.
284 let redirect_line = match lines.nth(6) {
289 redirect_line.find(REDIRECT).map(|i| {
// Skip past the marker plus the opening quote, then take up to the closing
// quote to extract the href value.
290 let rest = &redirect_line[(i + REDIRECT.len() + 1)..];
291 let pos_quote = rest.find('"').unwrap();
292 rest[..pos_quote].to_owned()
// Resolves `url` against the base URL and converts the result to a local
// filesystem path, returning both; None if either step fails. (The line
// joining `url` onto `parser` is elided in this excerpt.)
296 fn url_to_file_path(parser: &Url, url: &str) -> Option<(Url, PathBuf)> {
299 .and_then(|parsed_url| parsed_url.to_file_path().ok().map(|f| (parsed_url, f)))
// Poor-man's HTML attribute scanner: for every `attr="value"` (or
// single-quoted) occurrence in `contents`, invokes `f(value, line_index)`.
// No real HTML parsing — just string searching, as the module docs note.
// (Several `None =>` continue/break arms, the line-advance statements, and
// the function's tail are elided in this excerpt; the definition also runs
// past the end of the visible source.)
302 fn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str, attr: &str, mut f: F) {
303 for (i, mut line) in contents.lines().enumerate() {
// A line may contain several attributes; keep scanning the remainder.
304 while let Some(j) = line.find(attr) {
305 let rest = &line[j + attr.len()..];
307 let pos_equals = match rest.find("=") {
// Anything but spaces between the attribute name and '=' means this wasn't
// actually the attribute (e.g. a substring match) — skip it.
311 if rest[..pos_equals].trim_left_matches(" ") != "" {
315 let rest = &rest[pos_equals + 1..];
// Accept either quote style and remember which one opened the value.
317 let pos_quote = match rest.find(&['"', '\''][..]) {
321 let quote_delim = rest.as_bytes()[pos_quote] as char;
// Same guard as above: only spaces may sit between '=' and the open quote.
323 if rest[..pos_quote].trim_left_matches(" ") != "" {
326 let rest = &rest[pos_quote + 1..];
// The value runs up to the matching close quote.
327 let url = match rest.find(quote_delim) {
328 Some(i) => &rest[..i],