use digest::Digest;
use json;
+use json::ToJson;
use sha1::Sha1;
use serialize::{Encoder, Encodable, Decoder, Decodable};
use arc::{Arc,RWArc};
use treemap::TreeMap;
-
use std::cell::Cell;
use std::comm::{PortOne, oneshot};
use std::either::{Either, Left, Right};
-use std::io;
-use std::run;
-use std::task;
+use std::{io, os, task};
/**
*
}
}
+// FIXME #8883: The key should be a WorkKey and not a ~str.
+// This is working around some JSON weirdness.
+// Outer map: work name (e.g. a file path) -> per-kind entries.
+#[deriving(Clone, Eq, Encodable, Decodable)]
+struct WorkMap(TreeMap<~str, KindMap>);
+
#[deriving(Clone, Eq, Encodable, Decodable)]
-struct WorkMap(TreeMap<WorkKey, ~str>);
+// Inner map: kind (source, exe, url, etc.) -> value (e.g. hash of contents).
+struct KindMap(TreeMap<~str, ~str>);
impl WorkMap {
fn new() -> WorkMap { WorkMap(TreeMap::new()) }
+
+// Insert `val` for the (kind, name) pair carried by `k`, creating the
+// per-name KindMap on the first use of that name.
+ fn insert_work_key(&mut self, k: WorkKey, val: ~str) {
+ let WorkKey { kind, name } = k;
+// Fast path: a KindMap for this name already exists; add to it in place.
+ match self.find_mut(&name) {
+ Some(&KindMap(ref mut m)) => { m.insert(kind, val); return; }
+ None => ()
+ }
+// Slow path: build a fresh single-entry KindMap for this name.
+ let mut new_map = TreeMap::new();
+ new_map.insert(kind, val);
+ self.insert(name, KindMap(new_map));
+ }
}
struct Database {
impl Database {
pub fn new(p: Path) -> Database {
- Database {
+ let mut rslt = Database {
db_filename: p,
db_cache: TreeMap::new(),
db_dirty: false
+ };
+// Warm the in-memory cache from an existing on-disk database, if present.
+ if os::path_exists(&rslt.db_filename) {
+ rslt.load();
}
+ rslt
}
pub fn prepare(&self,
self.db_cache.insert(k,v);
self.db_dirty = true
}
+
+ // FIXME #4330: This should have &mut self and should set self.db_dirty to false.
+// Serialize the whole cache to the database file as pretty-printed JSON,
+// truncating any previous file contents.
+ fn save(&self) {
+ let f = io::file_writer(&self.db_filename, [io::Create, io::Truncate]).unwrap();
+ self.db_cache.to_json().to_pretty_writer(f);
+ }
+
+// Replace the in-memory cache with the JSON-decoded contents of the
+// database file. Fails the task (with the filename) on I/O or parse errors.
+// Callers must only invoke this before any unsaved writes exist.
+ fn load(&mut self) {
+ assert!(!self.db_dirty);
+ assert!(os::path_exists(&self.db_filename));
+ let f = io::file_reader(&self.db_filename);
+ match f {
+ Err(e) => fail!("Couldn't load workcache database %s: %s",
+ self.db_filename.to_str(), e.to_str()),
+ Ok(r) =>
+ match json::from_reader(r) {
+ Err(e) => fail!("Couldn't parse workcache database (from file %s): %s",
+ self.db_filename.to_str(), e.to_str()),
+ Ok(r) => {
+ let mut decoder = json::Decoder(r);
+ self.db_cache = Decodable::decode(&mut decoder);
+ }
+ }
+ }
+ }
+}
+
+// FIXME #4330: use &mut self here
+#[unsafe_destructor]
+impl Drop for Database {
+// Flush unsaved changes to disk when the database is dropped, so cache
+// updates survive even if the caller never saved explicitly.
+ fn drop(&self) {
+ if self.db_dirty {
+ self.save();
+ }
+ }
}
struct Logger {
}
}
+// Maps a kind string to the function that decides freshness for that kind
+// (see the `freshness` field below for the function's contract).
+type FreshnessMap = TreeMap<~str,extern fn(&str,&str)->bool>;
+
#[deriving(Clone)]
struct Context {
db: RWArc<Database>,
logger: RWArc<Logger>,
cfg: Arc<json::Object>,
- freshness: Arc<TreeMap<~str,extern fn(&str,&str)->bool>>
+ /// Map from kinds (source, exe, url, etc.) to a freshness function.
+ /// The freshness function takes a name (e.g. file path) and value
+ /// (e.g. hash of file contents) and determines whether it's up-to-date.
+ /// For example, in the file case, this would read the file off disk,
+ /// hash it, and return the result of comparing the given hash and the
+ /// read hash for equality.
+ freshness: Arc<FreshnessMap>
}
struct Prep<'self> {
// FIXME(#5121)
fn json_decode<T:Decodable<json::Decoder>>(s: &str) -> T {
+ debug!("json decoding: %s", s);
do io::with_str_reader(s) |rdr| {
let j = json::from_reader(rdr).unwrap();
let mut decoder = json::Decoder(j);
pub fn new(db: RWArc<Database>,
lg: RWArc<Logger>,
cfg: Arc<json::Object>) -> Context {
+// Delegate to new_with_freshness with an empty freshness map.
+ Context::new_with_freshness(db, lg, cfg, Arc::new(TreeMap::new()))
+ }
+
+// Construct a Context with an explicit kind -> freshness-function map.
+ pub fn new_with_freshness(db: RWArc<Database>,
+ lg: RWArc<Logger>,
+ cfg: Arc<json::Object>,
+ freshness: Arc<FreshnessMap>) -> Context {
Context {
db: db,
logger: lg,
cfg: cfg,
- freshness: Arc::new(TreeMap::new())
+ freshness: freshness
}
}
}
}
+impl Exec {
+// Record a dependency (kind, name) -> val discovered while the work ran.
+ pub fn discover_input(&mut self, dependency_kind:&str,
+ // Discovered input
+ dependency_name: &str, dependency_val: &str) {
+ debug!("Discovering input %s %s %s", dependency_kind, dependency_name, dependency_val);
+ self.discovered_inputs.insert_work_key(WorkKey::new(dependency_kind, dependency_name),
+ dependency_val.to_owned());
+ }
+// Record an output (kind, name) -> val produced while the work ran.
+ pub fn discover_output(&mut self, dependency_kind:&str,
+ // Discovered output
+ dependency_name: &str, dependency_val: &str) {
+ debug!("Discovering output %s %s %s", dependency_kind, dependency_name, dependency_val);
+ self.discovered_outputs.insert_work_key(WorkKey::new(dependency_kind, dependency_name),
+ dependency_val.to_owned());
+ }
+
+ // returns pairs of (kind, name)
+ pub fn lookup_discovered_inputs(&self) -> ~[(~str, ~str)] {
+ let mut rs = ~[];
+// Outer iteration is by name, inner by kind, hence the (k1, k) swap below.
+ for (k, v) in self.discovered_inputs.iter() {
+ for (k1, _) in v.iter() {
+ rs.push((k1.clone(), k.clone()));
+ }
+ }
+ rs
+ }
+
+}
+
impl<'self> Prep<'self> {
fn new(ctxt: &'self Context, fn_name: &'self str) -> Prep<'self> {
Prep {
declared_inputs: WorkMap::new()
}
}
+
+// Flatten declared_inputs into a flat list of its inner-map keys.
+// NOTE(review): this collects the inner (kind) keys and discards the
+// outer (name) keys — confirm callers expect kinds here, not names.
+ pub fn lookup_declared_inputs(&self) -> ~[~str] {
+ let mut rs = ~[];
+ for (_, v) in self.declared_inputs.iter() {
+ for (inp, _) in v.iter() {
+ rs.push(inp.clone());
+ }
+ }
+ rs
+ }
}
impl<'self> Prep<'self> {
- fn declare_input(&mut self, kind:&str, name:&str, val:&str) {
- self.declared_inputs.insert(WorkKey::new(kind, name),
+// Record an up-front (kind, name) -> val input for this unit of work.
+ pub fn declare_input(&mut self, kind:&str, name:&str, val:&str) {
+ debug!("Declaring input %s %s %s", kind, name, val);
+ self.declared_inputs.insert_work_key(WorkKey::new(kind, name),
val.to_owned());
}
name: &str, val: &str) -> bool {
let k = kind.to_owned();
let f = self.ctxt.freshness.get().find(&k);
+ debug!("freshness for: %s/%s/%s/%s", cat, kind, name, val)
let fresh = match f {
None => fail!("missing freshness-function for '%s'", kind),
Some(f) => (*f)(name, val)
}
+// True iff every (kind, name, val) entry in `map` passes is_fresh;
+// `cat` labels which category of inputs/outputs is being checked.
fn all_fresh(&self, cat: &str, map: &WorkMap) -> bool {
- for (k, v) in map.iter() {
- if ! self.is_fresh(cat, k.kind, k.name, *v) {
- return false;
+ for (k_name, kindmap) in map.iter() {
+ for (k_kind, v) in kindmap.iter() {
+ if ! self.is_fresh(cat, *k_kind, *k_name, *v) {
+ return false;
}
+ }
}
return true;
}
- fn exec<T:Send +
+// Run (or reuse the cached result of) this unit of work, forcing the
+// lazy result of exec_work immediately via unwrap.
+ pub fn exec<T:Send +
Encodable<json::Encoder> +
Decodable<json::Decoder>>(
- &'self self, blk: ~fn(&Exec) -> T) -> T {
+ &'self self, blk: ~fn(&mut Exec) -> T) -> T {
self.exec_work(blk).unwrap()
}
+// Core cache logic: on a fresh cache hit, return Left(decoded cached
+// value); otherwise spawn a task to run `blk` and return Right(port)
+// from which the (Exec, result) pair can later be received.
fn exec_work<T:Send +
Encodable<json::Encoder> +
Decodable<json::Decoder>>( // FIXME(#5121)
- &'self self, blk: ~fn(&Exec) -> T) -> Work<'self, T> {
+ &'self self, blk: ~fn(&mut Exec) -> T) -> Work<'self, T> {
let mut bo = Some(blk);
+ debug!("exec_work: looking up %s and %?", self.fn_name,
+ self.declared_inputs);
let cached = do self.ctxt.db.read |db| {
db.prepare(self.fn_name, &self.declared_inputs)
};
+// A hit is usable only if declared and discovered inputs/outputs are all fresh.
if self.all_fresh("declared input",&self.declared_inputs) &&
self.all_fresh("discovered input", disc_in) &&
self.all_fresh("discovered output", disc_out) => {
+ debug!("Cache hit!");
+ debug!("Trying to decode: %? / %? / %?",
+ disc_in, disc_out, *res);
Left(json_decode(*res))
}
_ => {
+ debug!("Cache miss!");
+// The work runs in its own task and reports back over a oneshot channel.
let (port, chan) = oneshot();
let blk = bo.take_unwrap();
let chan = Cell::new(chan);
+// What happens if the task fails?
do task::spawn {
- let exe = Exec {
+ let mut exe = Exec {
discovered_inputs: WorkMap::new(),
discovered_outputs: WorkMap::new(),
};
let chan = chan.take();
- let v = blk(&exe);
+ let v = blk(&mut exe);
chan.send((exe, v));
}
Right(port)
}
-//#[test]
+#[test]
fn test() {
use std::io::WriterUtil;
+ use std::run;
let pth = Path("foo.c");
{