1 //! Checks the licenses of third-party dependencies.
3 use cargo_metadata::{Metadata, Package, PackageId, Resolve};
4 use std::collections::{BTreeSet, HashSet};
7 /// These are licenses that are allowed for all crates, including the runtime,
// Each entry is a complete SPDX-style license expression. `check_exceptions`
// compares a package's declared `license` field against this list with exact
// string equality (`LICENSES.contains(&license.as_str())`), so spelling,
// ordering, and operator casing must match the crate's declaration verbatim.
9 const LICENSES: &[&str] = &[
16 "Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT", // wasi license
22 /// These are exceptions to Rust's permissive licensing policy, and
23 /// should be considered bugs. Exceptions are only allowed in Rust
24 /// tooling. It is _crucial_ that no exception crates be dependencies
25 /// of the Rust runtime (std/test).
26 const EXCEPTIONS: &[(&str, &str)] = &[
// Format: (crate name, license expression exactly as the crate declares it).
// `check_exceptions` errors if an entry's crate no longer exists in the
// dependency graph, or if the crate's declared license has drifted from the
// string recorded here. The trailing comment names the tool(s) pulling it in.
27 ("mdbook", "MPL-2.0"), // mdbook
28 ("openssl", "Apache-2.0"), // cargo, mdbook
29 ("arrayref", "BSD-2-Clause"), // mdbook via handlebars via pest
30 ("toml-query", "MPL-2.0"), // mdbook
31 ("toml-query_derive", "MPL-2.0"), // mdbook
32 ("is-match", "MPL-2.0"), // mdbook
33 ("rdrand", "ISC"), // mdbook, rustfmt
34 ("fuchsia-cprng", "BSD-3-Clause"), // mdbook, rustfmt
35 ("fuchsia-zircon-sys", "BSD-3-Clause"), // rustdoc, rustc, cargo
36 ("fuchsia-zircon", "BSD-3-Clause"), // rustdoc, rustc, cargo (jobserver & tempdir)
37 ("colored", "MPL-2.0"), // rustfmt
38 ("ordslice", "Apache-2.0"), // rls
39 ("cloudabi", "BSD-2-Clause"), // (rls -> crossbeam-channel 0.2 -> rand 0.5)
40 ("ryu", "Apache-2.0 OR BSL-1.0"), // rls/cargo/... (because of serde)
41 ("bytesize", "Apache-2.0"), // cargo
42 ("im-rc", "MPL-2.0+"), // cargo
43 ("adler32", "BSD-3-Clause AND Zlib"), // cargo dep that isn't used
44 ("constant_time_eq", "CC0-1.0"), // rustfmt
45 ("sized-chunks", "MPL-2.0+"), // cargo via im-rc
46 ("bitmaps", "MPL-2.0+"), // cargo via im-rc
47 // FIXME: this dependency violates the documentation comment above:
48 ("fortanix-sgx-abi", "MPL-2.0"), // libstd but only for `sgx` target
49 ("dunce", "CC0-1.0"), // mdbook-linkcheck
50 ("codespan-reporting", "Apache-2.0"), // mdbook-linkcheck
51 ("codespan", "Apache-2.0"), // mdbook-linkcheck
52 ("crossbeam-channel", "MIT/Apache-2.0 AND BSD-2-Clause"), // cargo
55 /// These are the root crates that are part of the runtime. The licenses for
56 /// these and all their dependencies *must not* be in the exception list.
// Each name must resolve to exactly one package in the workspace —
// `pkg_from_name` (used by `compute_runtime_crates`) panics on zero or
// multiple matches. These are the DFS roots for the runtime-crate set.
57 const RUNTIME_CRATES: &[&str] = &["std", "core", "alloc", "test", "panic_abort", "panic_unwind"];
59 /// Which crates to check against the whitelist?
// Roots of the dependency walk in `check_whitelist`: every transitive
// dependency of these crates must appear on `WHITELIST`, unless the
// dependency is an in-tree (local) package.
60 const WHITELIST_CRATES: &[&str] = &["rustc", "rustc_codegen_llvm"];
62 /// Whitelist of crates rustc is allowed to depend on. Avoid adding to the list if possible.
64 /// This list is here to provide a speed-bump to adding a new dependency to
65 /// rustc. Please check with the compiler team before adding an entry.
// Stale entries are errors too: `check_whitelist` flags any name listed here
// that no longer appears anywhere in the resolved crate graph.
66 const WHITELIST: &[&str] = &[
100 "fuchsia-zircon-sys",
155 "stable_deref_trait",
164 "unicode-normalization",
175 "winapi-i686-pc-windows-gnu",
177 "winapi-x86_64-pc-windows-gnu",
182 /// Dependency checks.
184 /// `path` is path to the `src` directory, `cargo` is path to the cargo executable.
// Runs `cargo metadata` with all features enabled on the workspace manifest
// and feeds the result to the three sub-checks. Problems are reported by the
// sub-checks setting `*bad = true`; this function itself never early-returns.
185 pub fn check(path: &Path, cargo: &Path, bad: &mut bool) {
186 let mut cmd = cargo_metadata::MetadataCommand::new();
187 cmd.cargo_path(cargo)
// `path` is the `src` directory, so its parent is the repository root, which
// holds the workspace `Cargo.toml`.
188 .manifest_path(path.parent().unwrap().join("Cargo.toml"))
189 .features(cargo_metadata::CargoOpt::AllFeatures);
// NOTE(review): `t!` is a tidy-local macro defined elsewhere in this crate —
// presumably it unwraps the `Result` with a diagnostic on failure; confirm.
190 let metadata = t!(cmd.exec());
191 check_exceptions(&metadata, bad);
192 check_whitelist(&metadata, bad);
193 check_crate_duplicate(&metadata, bad);
196 /// Check that all licenses are in the valid list in `LICENSES`.
198 /// Packages listed in `EXCEPTIONS` are allowed for tools.
// Two passes:
//  1. Validate the EXCEPTIONS table itself — each listed crate must still
//     exist, and its declared license must still match the recorded string.
//  2. Scan every external package — its declared license must be in LICENSES,
//     unless the package is a non-runtime exception crate.
199 fn check_exceptions(metadata: &Metadata, bad: &mut bool) {
200 // Validate the EXCEPTIONS list hasn't changed.
201 for (name, license) in EXCEPTIONS {
202 // Check that the package actually exists.
203 if !metadata.packages.iter().any(|p| p.name == *name) {
205 "could not find exception package `{}`\n\
206 Remove from EXCEPTIONS list if it is no longer used.",
211 // Check that the license hasn't changed.
// One name can match several packages (multiple versions); check each match.
212 for pkg in metadata.packages.iter().filter(|p| p.name == *name) {
213 if pkg.name == "fuchsia-cprng" {
214 // This package doesn't declare a license expression. Manual
215 // inspection of the license file is necessary, which appears
216 // to be BSD-3-Clause.
217 assert!(pkg.license.is_none());
223 "dependency exception `{}` does not declare a license expression",
// Declared license must match the EXCEPTIONS entry byte-for-byte.
228 Some(pkg_license) => {
229 if pkg_license.as_str() != *license {
230 println!("dependency exception `{}` license has changed", name);
231 println!(" previously `{}` now `{}`", license, pkg_license);
232 println!(" update EXCEPTIONS for the new license");
240 let exception_names: Vec<_> = EXCEPTIONS.iter().map(|(name, _license)| *name).collect();
// Runtime crate ids: exceptions are deliberately *not* honored for these
// (see the EXCEPTIONS doc comment — the runtime must use permissive licenses).
241 let runtime_ids = compute_runtime_crates(metadata);
243 // Check if any package does not have a valid license.
244 for pkg in &metadata.packages {
// `source == None` means the package is local (in-tree), not from a registry.
245 if pkg.source.is_none() {
246 // No need to check local packages.
249 if !runtime_ids.contains(&pkg.id) && exception_names.contains(&pkg.name.as_str()) {
252 let license = match &pkg.license {
253 Some(license) => license,
255 println!("dependency `{}` does not define a license expression", pkg.id,);
// Exact-match lookup against the allowed license expressions.
260 if !LICENSES.contains(&license.as_str()) {
261 if pkg.name == "fortanix-sgx-abi" {
262 // This is a specific exception because SGX is considered
263 // "third party". See
264 // https://github.com/rust-lang/rust/issues/62620 for more. In
265 // general, these should never be added.
268 println!("invalid license `{}` in `{}`", license, pkg.id);
274 /// Checks the dependency of `WHITELIST_CRATES` at the given path. Changes `bad` to `true` if a
277 /// Specifically, this checks that the dependencies are on the `WHITELIST`.
278 fn check_whitelist(metadata: &Metadata, bad: &mut bool) {
279 // Check that the WHITELIST does not have unused entries.
280 for name in WHITELIST {
281 if !metadata.packages.iter().any(|p| p.name == *name) {
283 "could not find whitelisted package `{}`\n\
284 Remove from WHITELIST list if it is no longer used.",
290 // Get the whitelist in a convenient form.
291 let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();
293 // Check dependencies.
// `visited` is shared across all roots so overlapping dependency subtrees
// are only walked once; `unapproved` accumulates every offending package id.
294 let mut visited = BTreeSet::new();
295 let mut unapproved = BTreeSet::new();
296 for &krate in WHITELIST_CRATES.iter() {
297 let pkg = pkg_from_name(metadata, krate);
// Note: this `bad` shadows the outer flag — here it is the set of bad ids
// returned by the walk, not the boolean.
298 let mut bad = check_crate_whitelist(&whitelist, metadata, &mut visited, pkg);
299 unapproved.append(&mut bad);
302 if !unapproved.is_empty() {
303 println!("Dependencies not on the whitelist:");
// BTreeSet iteration yields a deterministic, sorted report.
304 for dep in unapproved {
305 println!("* {}", dep);
311 /// Checks the dependencies of the given crate from the given cargo metadata to see if they are on
312 /// the whitelist. Returns a list of illegal dependencies.
// Recursive DFS over the crate graph. `visited` prevents re-walking shared
// subtrees; the returned set contains the ids of every external dependency
// reachable from `krate` that is not on the whitelist.
313 fn check_crate_whitelist<'a>(
314 whitelist: &'a HashSet<&'static str>,
315 metadata: &'a Metadata,
316 visited: &mut BTreeSet<&'a PackageId>,
318 ) -> BTreeSet<&'a PackageId> {
319 // This will contain bad deps.
320 let mut unapproved = BTreeSet::new();
322 // Check if we have already visited this crate.
323 if visited.contains(&krate.id) {
327 visited.insert(&krate.id);
329 // If this path is in-tree, we don't require it to be on the whitelist.
// `source.is_some()` means the package comes from an external source
// (e.g. a registry), so the whitelist applies.
330 if krate.source.is_some() {
331 // If this dependency is not on `WHITELIST`, add to bad set.
332 if !whitelist.contains(krate.name.as_str()) {
333 unapproved.insert(&krate.id);
337 // Do a DFS in the crate graph.
338 let to_check = deps_of(metadata, &krate.id);
340 for dep in to_check {
341 let mut bad = check_crate_whitelist(whitelist, metadata, visited, dep);
342 unapproved.append(&mut bad);
348 /// Prevents multiple versions of some expensive crates.
349 fn check_crate_duplicate(metadata: &Metadata, bad: &mut bool) {
350 const FORBIDDEN_TO_HAVE_DUPLICATES: &[&str] = &[
351 // These two crates take quite a long time to build, so don't allow two versions of them
352 // to accidentally sneak into our dependency graph, in order to ensure we keep our CI times
358 for &name in FORBIDDEN_TO_HAVE_DUPLICATES {
// Each distinct version of a crate appears as a separate entry in
// `metadata.packages`, so counting name matches counts versions.
359 let matches: Vec<_> = metadata.packages.iter().filter(|pkg| pkg.name == name).collect();
360 match matches.len() {
// Zero matches means this const is stale — also reported as an error.
363 "crate `{}` is missing, update `check_crate_duplicate` \
364 if it is no longer used",
372 "crate `{}` is duplicated in `Cargo.lock`, \
373 it is too expensive to build multiple times, \
374 so make sure only one version appears across all dependencies",
// Print each offending package id so the duplicates can be located.
378 println!(" * {}", pkg.id);
386 /// Returns a list of dependencies for the given package.
// Panics if `pkg_id` is not in the resolve graph, or if a resolved dependency
// id has no matching entry in `metadata.packages` — both indicate
// inconsistent metadata, not a user-fixable condition.
387 fn deps_of<'a>(metadata: &'a Metadata, pkg_id: &'a PackageId) -> Vec<&'a Package> {
// NOTE(review): `resolve` is assumed present here (it can be `None` when
// metadata is generated without dependency resolution) — confirm callers
// always go through `check`, which resolves dependencies.
388 let resolve = metadata.resolve.as_ref().unwrap();
392 .find(|n| &n.id == pkg_id)
393 .unwrap_or_else(|| panic!("could not find `{}` in resolve", pkg_id));
// Map each resolved dependency id back to its full `Package` record.
397 metadata.packages.iter().find(|pkg| pkg.id == dep.pkg).unwrap_or_else(|| {
398 panic!("could not find dep `{}` for pkg `{}` in resolve", dep.pkg, pkg_id)
404 /// Finds a package with the given name.
// Panics if no package has the given name, and asserts the name is unique so
// callers can rely on an unambiguous single match.
405 fn pkg_from_name<'a>(metadata: &'a Metadata, name: &'static str) -> &'a Package {
406 let mut i = metadata.packages.iter().filter(|p| p.name == name);
408 i.next().unwrap_or_else(|| panic!("could not find package `{}` in package list", name));
409 assert!(i.next().is_none(), "more than one package found for `{}`", name);
413 /// Finds all the packages that are in the rust runtime.
// Seeds a recursive walk (`normal_deps_of_r`) from each RUNTIME_CRATES root,
// collecting the transitive set of normal (non-dev, non-build) dependency ids
// into a single `HashSet`.
414 fn compute_runtime_crates<'a>(metadata: &'a Metadata) -> HashSet<&'a PackageId> {
415 let resolve = metadata.resolve.as_ref().unwrap();
416 let mut result = HashSet::new();
417 for name in RUNTIME_CRATES {
418 let id = &pkg_from_name(metadata, name).id;
419 normal_deps_of_r(resolve, id, &mut result);
424 /// Recursively find all normal dependencies.
// `result` doubles as the visited set: `HashSet::insert` returning `false`
// means the id was already processed, which terminates the recursion (and
// guards against cycles in the graph).
425 fn normal_deps_of_r<'a>(
426 resolve: &'a Resolve,
427 pkg_id: &'a PackageId,
428 result: &mut HashSet<&'a PackageId>,
430 if !result.insert(pkg_id) {
436 .find(|n| &n.id == pkg_id)
437 .unwrap_or_else(|| panic!("could not find `{}` in resolve", pkg_id));
438 // Don't care about dev-dependencies.
439 // Build dependencies *shouldn't* matter unless they do some kind of
440 // codegen. For now we'll assume they don't.
// Keep a dependency if *any* of its dep-kind records is a normal dependency.
441 let deps = node.deps.iter().filter(|node_dep| {
445 .any(|kind_info| kind_info.kind == cargo_metadata::DependencyKind::Normal)
448 normal_deps_of_r(resolve, &dep.pkg, result);