1 // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 #[allow(deprecated_mode)];
15 use serialize::{Encoder, Encodable, Decoder, Decodable};
20 use core::comm::{ChanOne, PortOne, oneshot, send_one};
21 use core::either::{Either, Left, Right};
22 use core::hashmap::HashMap;
24 use core::pipes::recv;
30 * This is a loose clone of the [fbuild build system](https://github.com/felix-lang/fbuild),
31 * made a touch more generic (not wired to special cases on files) and much
32 * less metaprogram-y due to rust's comparative weakness there, relative to
35 * It's based around _imperative builds_ that happen to have some function
36 * calls cached. That is, it's _just_ a mechanism for describing cached
37 * functions. This makes it much simpler and smaller than a "build system"
38 * that produces an IR and evaluates it. The evaluation order is normal
39 * function calls. Some of them just return really quickly.
41 * A cached function consumes and produces a set of _works_. A work has a
42 * name, a kind (that determines how the value is to be checked for
43 * freshness) and a value. Works must also be (de)serializable. Some
47 * ------------------------
52 * Works are conceptually single units, but we store them most of the time
53 * in maps of the form (type,name) => value. These are WorkMaps.
55 * A cached function divides the works it's interested in into inputs and
56 * outputs, and subdivides those into declared (input) works and
57 * discovered (input and output) works.
 * A _declared_ input is one that is given to the workcache before
60 * any work actually happens, in the "prep" phase. Even when a function's
61 * work-doing part (the "exec" phase) never gets called, it has declared
62 * inputs, which can be checked for freshness (and potentially
63 * used to determine that the function can be skipped).
65 * The workcache checks _all_ works for freshness, but uses the set of
66 * discovered outputs from the _previous_ exec (which it will re-discover
67 * and re-record each time the exec phase runs).
69 * Therefore the discovered works cached in the db might be a
70 * mis-approximation of the current discoverable works, but this is ok for
71 * the following reason: we assume that if an artifact A changed from
72 * depending on B,C,D to depending on B,C,D,E, then A itself changed (as
73 * part of the change-in-dependencies), so we will be ok.
75 * Each function has a single discriminated output work called its _result_.
76 * This is only different from other works in that it is returned, by value,
77 * from a call to the cacheable function; the other output works are used in
78 * passing to invalidate dependencies elsewhere in the cache, but do not
79 * otherwise escape from a function invocation. Most functions only have one
80 * output work anyways.
 * A database (the central store of a workcache) stores a mapping:
84 * (fn_name,{declared_input}) => ({discovered_input},
85 * {discovered_output},result)
87 * (Note: fbuild, which workcache is based on, has the concept of a declared
88 * output as separate from a discovered output. This distinction exists only
89 * as an artifact of how fbuild works: via annotations on function types
90 * and metaprogramming, with explicit dependency declaration as a fallback.
91 * Workcache is more explicit about dependencies, and as such treats all
92 * outputs the same, as discovered-during-the-last-run.)
104 impl to_bytes::IterBytes for WorkKey {
106 fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) {
108 self.kind.iter_bytes(lsb0, |bytes| {flag = f(bytes); flag});
110 self.name.iter_bytes(lsb0, f);
114 impl cmp::Ord for WorkKey {
115 fn lt(&self, other: &WorkKey) -> bool {
116 self.kind < other.kind ||
117 (self.kind == other.kind &&
118 self.name < other.name)
120 fn le(&self, other: &WorkKey) -> bool {
121 self.lt(other) || self.eq(other)
123 fn ge(&self, other: &WorkKey) -> bool {
124 self.gt(other) || self.eq(other)
126 fn gt(&self, other: &WorkKey) -> bool {
132 fn new(kind: &str, name: &str) -> WorkKey {
133 WorkKey { kind: kind.to_owned(), name: name.to_owned() }
137 struct WorkMap(HashMap<WorkKey, ~str>);
140 fn new() -> WorkMap { WorkMap(HashMap::new()) }
143 impl<S:Encoder> Encodable<S> for WorkMap {
144 fn encode(&self, s: &S) {
146 for self.each |k, v| {
147 d.push((copy *k, copy *v))
154 impl<D:Decoder> Decodable<D> for WorkMap {
155 fn decode(d: &D) -> WorkMap {
156 let v : ~[(WorkKey,~str)] = Decodable::decode(d);
157 let mut w = WorkMap::new();
158 for v.each |&(k, v)| {
159 w.insert(copy k, copy v);
167 db_cache: HashMap<~str, ~str>,
172 fn prepare(&mut self,
174 declared_inputs: &WorkMap) -> Option<(WorkMap, WorkMap, ~str)>
176 let k = json_encode(&(fn_name, declared_inputs));
177 match self.db_cache.find(&k) {
179 Some(v) => Some(json_decode(*v))
185 declared_inputs: &WorkMap,
186 discovered_inputs: &WorkMap,
187 discovered_outputs: &WorkMap,
189 let k = json_encode(&(fn_name, declared_inputs));
190 let v = json_encode(&(discovered_inputs,
193 self.db_cache.insert(k,v);
199 // FIXME #4432: Fill in
204 fn info(&self, i: &str) {
205 io::println(~"workcache: " + i.to_owned());
213 freshness: HashMap<~str,@fn(&str,&str)->bool>
219 declared_inputs: WorkMap,
223 discovered_inputs: WorkMap,
224 discovered_outputs: WorkMap
229 res: Option<Either<T,PortOne<(Exec,T)>>>
232 fn json_encode<T:Encodable<json::Encoder>>(t: &T) -> ~str {
233 do io::with_str_writer |wr| {
234 t.encode(&json::Encoder(wr));
239 fn json_decode<T:Decodable<json::Decoder>>(s: &str) -> T {
240 do io::with_str_reader(s) |rdr| {
241 let j = result::unwrap(json::from_reader(rdr));
242 Decodable::decode(&json::Decoder(j))
246 fn digest<T:Encodable<json::Encoder>>(t: &T) -> ~str {
247 let mut sha = sha1::sha1();
248 sha.input_str(json_encode(t));
252 fn digest_file(path: &Path) -> ~str {
253 let mut sha = sha1::sha1();
254 let s = io::read_whole_file_str(path);
255 sha.input_str(*s.get_ref());
261 fn new(db: @mut Database,
263 cfg: @json::Object) -> Context {
268 freshness: HashMap::new()
273 Encodable<json::Encoder> +
274 Decodable<json::Decoder>>( // FIXME(#5121)
277 blk: &fn(@mut Prep)->Work<T>) -> Work<T> {
280 fn_name: fn_name.to_owned(),
281 declared_inputs: WorkMap::new()
289 fn declare_input(&mut self, kind:&str, name:&str, val:&str);
290 fn is_fresh(&self, cat:&str, kind:&str, name:&str, val:&str) -> bool;
291 fn all_fresh(&self, cat:&str, map:&WorkMap) -> bool;
293 Encodable<json::Encoder> +
294 Decodable<json::Decoder>>( // FIXME(#5121)
295 &self, blk: ~fn(&Exec) -> T) -> Work<T>;
298 impl TPrep for Prep {
299 fn declare_input(&mut self, kind:&str, name:&str, val:&str) {
300 self.declared_inputs.insert(WorkKey::new(kind, name),
304 fn is_fresh(&self, cat: &str, kind: &str,
305 name: &str, val: &str) -> bool {
306 let k = kind.to_owned();
307 let f = (*self.ctxt.freshness.get(&k))(name, val);
308 let lg = self.ctxt.logger;
310 lg.info(fmt!("%s %s:%s is fresh",
313 lg.info(fmt!("%s %s:%s is not fresh",
319 fn all_fresh(&self, cat: &str, map: &WorkMap) -> bool {
320 for map.each |k, v| {
321 if ! self.is_fresh(cat, k.kind, k.name, *v) {
329 Encodable<json::Encoder> +
330 Decodable<json::Decoder>>( // FIXME(#5121)
331 &self, blk: ~fn(&Exec) -> T) -> Work<T> {
332 let mut bo = Some(blk);
334 let cached = self.ctxt.db.prepare(self.fn_name, &self.declared_inputs);
337 Some((ref disc_in, ref disc_out, ref res))
338 if self.all_fresh("declared input",
339 &self.declared_inputs) &&
340 self.all_fresh("discovered input", disc_in) &&
341 self.all_fresh("discovered output", disc_out) => {
342 Work::new(@mut *self, Left(json_decode(*res)))
346 let (port, chan) = oneshot();
349 let blk = blk.unwrap();
350 let chan = Cell(chan);
354 discovered_inputs: WorkMap::new(),
355 discovered_outputs: WorkMap::new(),
357 let chan = chan.take();
359 send_one(chan, (exe, v));
361 Work::new(@mut *self, Right(port))
368 Encodable<json::Encoder> +
369 Decodable<json::Decoder>> Work<T> { // FIXME(#5121)
370 fn new(p: @mut Prep, e: Either<T,PortOne<(Exec,T)>>) -> Work<T> {
371 Work { prep: p, res: Some(e) }
375 // FIXME (#3724): movable self. This should be in impl Work.
377 Encodable<json::Encoder> +
378 Decodable<json::Decoder>>( // FIXME(#5121)
388 Some(Right(port)) => {
389 let (exe, v) = match recv(port.unwrap()) {
390 oneshot::send(data) => data
393 let s = json_encode(&v);
399 &exe.discovered_inputs,
400 &exe.discovered_outputs,
409 use core::io::WriterUtil;
411 let db = @mut Database { db_filename: Path("db.json"),
412 db_cache: HashMap::new(),
414 let lg = @mut Logger { a: () };
415 let cfg = @HashMap::new();
416 let cx = @Context::new(db, lg, cfg);
417 let w:Work<~str> = do cx.prep("test1") |prep| {
418 let pth = Path("foo.c");
420 let file = io::file_writer(&pth, [io::Create]).get();
421 file.write_str("int main() { return 0; }");
424 prep.declare_input("file", pth.to_str(), digest_file(&pth));
425 do prep.exec |_exe| {
426 let out = Path("foo.o");
427 run::run_program("gcc", [~"foo.c", ~"-o", out.to_str()]);