1 // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 // Determines the ways in which a generic function body depends
12 // on its type parameters. Used to aggressively reuse compiled
13 // function bodies for different types.
15 // This unfortunately depends on quite a bit of knowledge about the
16 // details of the language semantics, and is likely to accidentally go
17 // out of sync when something is changed. It could be made more
18 // powerful by distinguishing between functions that only need to know
19 // the size and alignment of a type, and those that also use its
20 // drop/take glue. But this would increase the fragility of the code
21 // to a ridiculous level, and probably not catch all that many extra
22 // opportunities for reuse.
24 // (An other approach to doing what this code does is to instrument
25 // the translation code to set flags whenever it does something like
26 // alloca a type or get a tydesc. This would not duplicate quite as
27 // much information, but have the disadvantage of being very
31 use middle::trans::common::*;
32 use middle::trans::inline;
36 use std::option::{Some, None};
38 use extra::list::{List, Cons, Nil};
44 use syntax::parse::token;
// Per-type-parameter usage flags: a bitmask recording how a generic
// function body depends on each of its type parameters. One `type_uses`
// value is kept per parameter (see `Context::uses`).
47 pub type type_uses = uint; // Bitmask
48 pub static use_repr: uint = 1; /* Dependency on size/alignment/mode and
50 pub static use_tydesc: uint = 2; /* Takes the tydesc, or compares */
// Convenience mask: the body needs both the representation and the tydesc.
51 pub static use_all: uint = use_repr|use_tydesc;
// Fields of the analysis `Context` (the struct header is on a line not
// shown in this view): the crate-wide translation context, plus one
// mutable use-bitmask slot per type parameter of the function under
// analysis. The vector is frozen via clone in `store_type_uses`.
55 ccx: @mut CrateContext,
56 uses: @mut ~[type_uses]
/// Computes, for each of the `n_tps` type parameters of `fn_id`, a
/// bitmask (`use_repr` / `use_tydesc`) describing how the function body
/// depends on that parameter. Results are memoized in
/// `ccx.type_use_cache`; a recursive query hits the conservative
/// `use_all` entry inserted below before analysis begins.
59 pub fn type_uses_for(ccx: @mut CrateContext, fn_id: def_id, n_tps: uint)
// Freezes the accumulated per-parameter bitmasks (clone of the @mut
// vector), records them in the crate-wide cache, and returns them.
62 fn store_type_uses(cx: Context, fn_id: def_id) -> @~[type_uses] {
63 let Context { uses, ccx } = cx;
64 let uses = @(*uses).clone(); // freeze
65 ccx.type_use_cache.insert(fn_id, uses);
// Fast path: return the memoized result if this fn was already analyzed.
69 match ccx.type_use_cache.find(&fn_id) {
70 Some(uses) => return *uses,
// For cross-crate ids, presumably this instantiates an inlined local
// copy so the body can be inspected — TODO confirm (surrounding lines
// not shown in this view).
74 let fn_id_loc = if fn_id.crate == LOCAL_CRATE {
77 inline::maybe_instantiate_inline(ccx, fn_id)
80 // Conservatively assume full use for recursive loops
81 ccx.type_use_cache.insert(fn_id, @vec::from_elem(n_tps, use_all));
// Fresh context: no uses recorded yet for any type parameter.
85 uses: @mut vec::from_elem(n_tps, 0u)
88 // If the method is a default method, we mark all of the types as
89 // used. This is imprecise, but simple. Getting it right is
90 // tricky because the substs on the call and the substs on the
91 // default method differ, because of substs on the trait/impl.
92 let is_default = ty::provided_source(ccx.tcx, fn_id_loc).is_some();
93 // We also mark all of the params as used if it is an extern thing
94 // that we haven't been able to inline yet.
95 if is_default || fn_id_loc.crate != LOCAL_CRATE {
96 for n in range(0u, n_tps) { cx.uses[n] |= use_all; }
97 return store_type_uses(cx, fn_id);
// Dispatch on the kind of AST node the (possibly inlined) id refers to.
100 let map_node = match ccx.tcx.items.find(&fn_id_loc.node) {
105 ccx.sess.bug(fmt!("type_uses_for: unbound item ID %?",
111 ast_map::node_item(@ast::item { node: item_fn(_, _, _, _, ref body),
113 ast_map::node_method(@ast::method {body: ref body, _}, _, _) => {
// Ordinary fns and methods: walk the body and record actual uses.
114 handle_body(&cx, body);
116 ast_map::node_trait_method(*) => {
117 // This will be a static trait method. For now, we just assume
118 // it fully depends on all of the type information. (Doing
119 // otherwise would require finding the actual implementation).
120 for n in range(0u, n_tps) { cx.uses[n] |= use_all;}
121 // We need to return early, before the arguments are processed,
122 // because of difficulties in the handling of Self.
123 return store_type_uses(cx, fn_id);
// Enum variant constructors only build a value, so only the
// representation of each parameter is needed.
125 ast_map::node_variant(_, _, _) => {
126 for n in range(0u, n_tps) { cx.uses[n] |= use_repr;}
128 ast_map::node_foreign_item(i@@foreign_item {
129 node: foreign_item_fn(*),
// Intrinsics are classified by name: each gets a hard-coded flag
// set applied uniformly to all of its type parameters below.
135 if abi.is_intrinsic() {
136 let nm = cx.ccx.sess.str_of(i.ident);
137 let name = nm.as_slice();
138 let flags = if name.starts_with("atomic_") {
142 "size_of" | "pref_align_of" | "min_align_of" |
143 "uninit" | "init" | "transmute" | "move_val" |
144 "move_val_init" => use_repr,
146 "get_tydesc" | "needs_drop" | "contains_managed" => use_tydesc,
148 "visit_tydesc" | "forget" | "frame_address" |
149 "morestack_addr" => 0,
151 "offset" | "offset_inbounds" |
152 "memcpy32" | "memcpy64" | "memmove32" | "memmove64" |
153 "memset32" | "memset64" => use_repr,
// Float math and bit-twiddling intrinsics operate on concrete
// primitive types, so they need nothing from the parameters.
155 "sqrtf32" | "sqrtf64" | "powif32" | "powif64" |
156 "sinf32" | "sinf64" | "cosf32" | "cosf64" |
157 "powf32" | "powf64" | "expf32" | "expf64" |
158 "exp2f32" | "exp2f64" | "logf32" | "logf64" |
159 "log10f32"| "log10f64"| "log2f32" | "log2f64" |
160 "fmaf32" | "fmaf64" | "fabsf32" | "fabsf64" |
161 "floorf32"| "floorf64"| "ceilf32" | "ceilf64" |
162 "truncf32"| "truncf64" => 0,
164 "ctpop8" | "ctpop16" | "ctpop32" | "ctpop64" => 0,
166 "ctlz8" | "ctlz16" | "ctlz32" | "ctlz64" => 0,
167 "cttz8" | "cttz16" | "cttz32" | "cttz64" => 0,
169 "bswap16" | "bswap32" | "bswap64" => 0,
171 // would be cool to make these an enum instead of
173 _ => fail!("unknown intrinsic in type_use")
// Apply the chosen flags to every type parameter uniformly.
176 for n in range(0u, n_tps) { cx.uses[n] |= flags;}
179 ast_map::node_struct_ctor(*) => {
180 // Similarly to node_variant, this monomorphized function just
181 // uses the representations of all of its type parameters.
182 for n in range(0u, n_tps) { cx.uses[n] |= use_repr; }
185 ccx.tcx.sess.bug(fmt!("unknown node type in type_use: %s",
186 ast_map::node_id_to_str(
189 token::get_ident_interner())));
193 // Now handle arguments
// Every argument type must at least be representable (alloca'd/copied),
// so each input of the fn signature contributes `use_repr`.
194 match ty::get(ty::lookup_item_type(cx.ccx.tcx, fn_id).ty).sty {
195 ty::ty_bare_fn(ty::BareFnTy {sig: ref sig, _}) |
196 ty::ty_closure(ty::ClosureTy {sig: ref sig, _}) => {
197 for arg in sig.inputs.iter() {
198 type_needs(&cx, use_repr, *arg);
// Freeze and memoize the final result (replaces the conservative
// placeholder inserted above).
204 store_type_uses(cx, fn_id)
/// Records the `use_` bits against every type parameter that appears in
/// `ty`. Thin wrapper over `type_needs_inner` starting with an empty
/// seen-enums list.
207 pub fn type_needs(cx: &Context, use_: uint, ty: ty::t) {
208 // Optimization -- don't descend type if all params already have this use
209 if cx.uses.iter().any(|&elt| elt & use_ != use_) {
210 type_needs_inner(cx, use_, ty, @Nil);
/// Walks the structure of `ty`, OR-ing `use_` into `cx.uses[idx]` for
/// each type parameter encountered. `enums_seen` accumulates enum
/// def_ids already visited on this path so recursive enum definitions
/// do not cause infinite recursion.
214 pub fn type_needs_inner(cx: &Context,
217 enums_seen: @List<def_id>) {
218 do ty::maybe_walk_ty(ty) |ty| {
// Only types that actually mention a parameter can contribute uses.
219 if ty::type_has_params(ty) {
220 match ty::get(ty).sty {
222 This previously included ty_box -- that was wrong
223 because if we cast an @T to an trait (for example) and return
224 it, we depend on the drop glue for T (we have to write the
225 right tydesc into the result)
229 ty::ty_trait(*) => false,
// Enums: recurse into each variant's argument types with the
// substitutions applied, guarding against revisiting the same
// enum (see `enums_seen` above).
233 ty::ty_enum(did, ref substs) => {
234 if list::find(enums_seen, |id| *id == did).is_none() {
235 let seen = @Cons(did, enums_seen);
236 let r = ty::enum_variants(cx.ccx.tcx, did);
238 for aty in v.args.iter() {
239 let t = ty::subst(cx.ccx.tcx, &(*substs), *aty);
240 type_needs_inner(cx, use_, t, seen);
// A bare type parameter: record the requested use bits on its slot.
247 cx.uses[p.idx] |= use_;
/// Convenience wrapper: applies `type_needs` to the type of AST node `id`.
256 pub fn node_type_needs(cx: &Context, use_: uint, id: NodeId) {
257 type_needs(cx, use_, ty::node_id_to_type(cx.ccx.tcx, id));
/// Records type-parameter uses arising from a method call at expression
/// `e_id` (with call-site node `callee_id`). Statically-resolved methods
/// propagate their own per-parameter uses onto the call's type
/// substitutions; methods dispatched through a type parameter mark that
/// parameter as needing its tydesc.
260 pub fn mark_for_method_call(cx: &Context, e_id: NodeId, callee_id: NodeId) {
261 let mut opt_static_did = None;
263 let r = cx.ccx.maps.method_map.find(&e_id);
264 for mth in r.iter() {
// Statically known target: remember it; the recursive analysis
// happens after the loop (see note below).
266 typeck::method_static(did) => {
267 opt_static_did = Some(did);
// Dispatch through type parameter `param`: the vtable/tydesc for
// that parameter is required at runtime.
269 typeck::method_param(typeck::method_param {
270 param_num: typeck::param_numbered(param),
273 cx.uses[param] |= use_tydesc;
280 // Note: we do not execute this code from within the each() call
281 // above because the recursive call to `type_needs` can trigger
282 // inlining and hence can cause `method_map` and
283 // `node_type_substs` to be modified.
284 for &did in opt_static_did.iter() {
286 let r = cx.ccx.tcx.node_type_substs.find_copy(&callee_id);
// Zip the callee's per-parameter uses with the actual type
// substitutions at this call site and propagate each.
288 let type_uses = type_uses_for(cx.ccx, did, ts.len());
289 for (uses, subst) in type_uses.iter().zip(ts.iter()) {
290 type_needs(cx, *uses, *subst)
/// Records type-parameter uses implied by a single expression `e`,
/// matching on its form. This is the per-expression workhorse invoked by
/// the visitor in `handle_body`.
297 pub fn mark_for_expr(cx: &Context, e: &expr) {
// Expressions that construct or copy a value need the representation
// (size/alignment) of the expression's own type.
299 expr_vstore(_, _) | expr_vec(_, _) | expr_struct(*) | expr_tup(_) |
300 expr_unary(_, box(_), _) | expr_unary(_, uniq, _) |
301 expr_binary(_, add, _, _) | expr_repeat(*) => {
302 node_type_needs(cx, use_repr, e.id);
304 expr_cast(base, _) => {
305 let result_t = ty::node_id_to_type(cx.ccx.tcx, e.id);
306 match ty::get(result_t).sty {
308 // When we're casting to an trait, we need the
309 // tydesc for the expr that's being cast.
310 node_type_needs(cx, use_tydesc, base.id);
// Comparison operators need the operand's tydesc (per the
// `use_tydesc` flag's definition: "Takes the tydesc, or compares").
315 expr_binary(_, op, lhs, _) => {
317 eq | lt | le | ne | ge | gt => {
318 node_type_needs(cx, use_tydesc, lhs.id)
// A reference to a generic item: propagate the referenced item's own
// per-parameter uses onto this node's type substitutions.
323 expr_path(_) | expr_self => {
324 let opt_ts = cx.ccx.tcx.node_type_substs.find_copy(&e.id);
325 for ts in opt_ts.iter() {
326 let id = ast_util::def_id_of_def(cx.ccx.tcx.def_map.get_copy(&e.id));
327 let uses_for_ts = type_uses_for(cx.ccx, id, ts.len());
328 for (uses, subst) in uses_for_ts.iter().zip(ts.iter()) {
329 type_needs(cx, *uses, *subst)
// Borrowed/managed closures capture free variables into an
// environment, so each captured variable's representation is needed.
// Owned (~) closures are exempt here — TODO confirm why from the
// elided surrounding code.
333 expr_fn_block(*) => {
334 match ty::ty_closure_sigil(ty::expr_ty(cx.ccx.tcx, e)) {
335 ast::OwnedSigil => {}
336 ast::BorrowedSigil | ast::ManagedSigil => {
337 for fv in freevars::get_freevars(cx.ccx.tcx, e.id).iter() {
338 let node_id = ast_util::def_id_of_def(fv.def).node;
339 node_type_needs(cx, use_repr, node_id);
// Assignments and returns copy/move the value: representation needed.
344 expr_assign(val, _) | expr_assign_op(_, _, val, _) |
345 expr_ret(Some(val)) => {
346 node_type_needs(cx, use_repr, val.id);
348 expr_index(callee_id, base, _) => {
349 // FIXME (#2537): could be more careful and not count fields after
351 let base_ty = ty::node_id_to_type(cx.ccx.tcx, base.id);
352 type_needs(cx, use_repr, ty::type_autoderef(cx.ccx.tcx, base_ty));
// Indexing may be an overloaded method call.
353 mark_for_method_call(cx, e.id, callee_id);
355 expr_field(base, _, _) => {
356 // Method calls are now a special syntactic form,
357 // so `a.b` should always be a field.
358 assert!(!cx.ccx.maps.method_map.contains_key(&e.id));
360 let base_ty = ty::node_id_to_type(cx.ccx.tcx, base.id);
361 type_needs(cx, use_repr, ty::type_autoderef(cx.ccx.tcx, base_ty));
// Logging a value formats it via its tydesc.
363 expr_log(_, val) => {
364 node_type_needs(cx, use_tydesc, val.id);
// Calls: every argument type of the callee needs its representation.
366 expr_call(f, _, _) => {
367 let r = ty::ty_fn_args(ty::node_id_to_type(cx.ccx.tcx, f.id));
369 type_needs(cx, use_repr, *a);
// Method calls: receiver (after autoderef) and all argument types
// need representation, plus whatever the resolved method implies.
372 expr_method_call(callee_id, rcvr, _, _, _, _) => {
373 let base_ty = ty::node_id_to_type(cx.ccx.tcx, rcvr.id);
374 type_needs(cx, use_repr, ty::type_autoderef(cx.ccx.tcx, base_ty));
376 let r = ty::ty_fn_args(ty::node_id_to_type(cx.ccx.tcx, callee_id));
378 type_needs(cx, use_repr, *a);
380 mark_for_method_call(cx, e.id, callee_id);
// Inline asm: every input and output operand is materialized, so
// their representations are needed.
383 expr_inline_asm(ref ia) => {
384 for &(_, input) in ia.inputs.iter() {
385 node_type_needs(cx, use_repr, input.id);
387 for &(_, out) in ia.outputs.iter() {
388 node_type_needs(cx, use_repr, out.id);
392 expr_paren(e) => mark_for_expr(cx, e),
// These forms contribute no direct type-parameter uses themselves;
// their subexpressions are visited separately by the walker.
394 expr_match(*) | expr_block(_) | expr_if(*) | expr_while(*) |
395 expr_break(_) | expr_again(_) | expr_unary(*) | expr_lit(_) |
396 expr_mac(_) | expr_addr_of(*) | expr_ret(_) | expr_loop(*) |
397 expr_do_body(_) => (),
399 expr_for_loop(*) => fail!("non-desugared expr_for_loop")
/// Drives the analysis over a function body: builds an `oldvisit`
/// visitor that records uses for every expression, every local's type,
/// every pattern's type, and each block's tail expression, then walks
/// `body` with it. `visit_item` is a no-op so nested items are not
/// analyzed as part of this body.
403 pub fn handle_body(cx: &Context, body: &Block) {
404 let v = oldvisit::mk_vt(@oldvisit::Visitor {
// Recurse first, then classify this expression's own uses.
405 visit_expr: |e, (cx, v)| {
406 oldvisit::visit_expr(e, (cx, v));
407 mark_for_expr(cx, e);
// A local binding is stack-allocated: its type's representation matters.
409 visit_local: |l, (cx, v)| {
410 oldvisit::visit_local(l, (cx, v));
411 node_type_needs(cx, use_repr, l.id);
// Matching against a pattern also requires the matched type's representation.
413 visit_pat: |p, (cx, v)| {
414 oldvisit::visit_pat(p, (cx, v));
415 node_type_needs(cx, use_repr, p.id);
// A block's trailing expression is its value: representation needed.
417 visit_block: |b, (cx, v)| {
418 oldvisit::visit_block(b, (cx, v));
419 for e in b.expr.iter() {
420 node_type_needs(cx, use_repr, e.id);
// Deliberately skip nested items; they get their own analysis pass.
423 visit_item: |_i, (_cx, _v)| { },
424 ..*oldvisit::default_visitor()
426 (v.visit_block)(body, (cx, v));