/// assert_eq!(v, vec![1u,2,3,4]);
/// ```
#[unstable = "matches collection reform specification, waiting for dust to settle"]
- pub fn into_iter(self) -> MoveItems<T> {
+ pub fn into_iter(self) -> IntoIter<T> {
fn first<A, B>((a, _): (A, B)) -> A { a }
+ let first: fn((T, ())) -> T = first; // coerce to fn pointer
- MoveItems { iter: self.map.into_iter().map(first) }
+ IntoIter { iter: self.map.into_iter().map(first) }
}
}
fn filter<A>((i, v): (uint, Option<A>)) -> Option<(uint, A)> {
    v.map(|v| (i, v))
}
// Coerce to a concrete fn pointer so the filter_map adaptor's type is
// nameable by `IntoIter`.
let filter: fn((uint, Option<V>)) -> Option<(uint, V)> = filter; // coerce to fn ptr

let values = replace(&mut self.v, vec!());
IntoIter { iter: values.into_iter().enumerate().filter_map(filter) }
}
/// Return the number of elements in the map.
else { line }
}
// Coerce to a fn pointer so `LinesAny` can name the map adaptor's type.
let f: fn(&str) -> &str = f; // coerce to fn pointer
LinesAny { inner: self.lines().map(f) }
}
#[inline]
}
ref b => {
cx.sess.bug(
- format!("add_env adjustment on non-bare-fn: \
+ format!("add_env adjustment on non-fn-item: \
+ {}",
+ b).as_slice());
+ }
+ }
+ }
+
+ AdjustReifyFnPointer(_) => {
+ match unadjusted_ty.sty {
+ ty::ty_bare_fn(Some(_), ref b) => {
+ ty::mk_bare_fn(cx, None, (*b).clone())
+ }
+ ref b => {
+ cx.sess.bug(
+ format!("AdjustReifyFnPointer adjustment on non-fn-item: \
{}",
- b).as_slice());
+ b)[]);
}
}
}
did: ast::DefId, t: Ty<'tcx>) -> ValueRef {
    let name = csearch::get_symbol(&ccx.sess().cstore, did);
    match t.sty {
        // ty_bare_fn now carries an optional fn-item DefId as its first
        // field; it is irrelevant for symbol lookup, so ignore it.
        ty::ty_bare_fn(_, ref fn_ty) => {
            match ccx.sess().target.target.adjust_abi(fn_ty.abi) {
                Rust | RustCall => {
                    get_extern_rust_fn(ccx, t, name[], did)
                }
                RustIntrinsic => {
                    ccx.sess().bug("unexpected intrinsic in trans_external_path")
pub fn get_wrapper_for_bare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
closure_ty: Ty<'tcx>,
- def: def::Def,
+ def_id: ast::DefId,
fn_ptr: ValueRef,
- is_local: bool) -> ValueRef
-{
+ is_local: bool) -> ValueRef {
+
+ let def_id = match def {
+ def::DefFn(did, _) | def::DefStaticMethod(did, _) |
+ def::DefVariant(_, did, _) | def::DefStruct(did) => did,
+ _ => {
+ ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
+ expected a statically resolved fn, got \
+ {}",
+ def)[]);
+ }
+ };
+
match ccx.closure_bare_wrapper_cache().borrow().get(&fn_ptr) {
Some(&llval) => return llval,
None => {}
cx.sess()
.span_bug(e.span,
format!("unexpected static function: {}",
- store).as_slice())
+ store)[])
}
+ ty::AdjustReifyFnPointer(_def_id) => {
+ // FIXME(#19925) once fn item types are
+ // zero-sized, we'll need to do something here
+ }
ty::AdjustDerefRef(ref adj) => {
let mut ty = ety;
// Save the last autoderef in case we can avoid it.
ccx.tn().val_to_string(llretptr));
// ty_bare_fn's new first field (optional fn-item DefId) is ignored here;
// only the ABI and signature matter for the native call.
let (fn_abi, fn_sig) = match callee_ty.sty {
    ty::ty_bare_fn(_, ref fn_ty) => (fn_ty.abi, fn_ty.sig.clone()),
    _ => ccx.sess().bug("trans_native_call called on non-function type")
};
let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys[]);
let fn_type = cabi::compute_abi_info(ccx,
                                     llsig.llarg_tys[],
                                     llsig.llret_ty,
                                     llsig.ret_def);
/// let vec: Vec<(&str, int)> = map.into_iter().collect();
/// ```
#[unstable = "matches collection reform specification, waiting for dust to settle"]
- pub fn into_iter(self) -> MoveEntries<K, V> {
+ pub fn into_iter(self) -> IntoIter<K, V> {
fn last_two<A, B, C>((_, b, c): (A, B, C)) -> (B, C) { (b, c) }
+ let last_two: fn((SafeHash, K, V)) -> (K, V) = last_two;
- MoveEntries {
+ IntoIter {
inner: self.table.into_iter().map(last_two)
}
}
/// Returns an iterator that yields each component of the path as Option<&str>.
/// See components() for details.
pub fn str_components<'a>(&'a self) -> StrComponents<'a> {
    // str::from_utf8 now returns a Result; adapt it back to the Option
    // this iterator yields.
    fn from_utf8(s: &[u8]) -> Option<&str> {
        str::from_utf8(s).ok()
    }
    let f: fn(&[u8]) -> Option<&str> = from_utf8; // coerce to fn ptr
    self.components().map(f)
}
}
Some(_) => {
    let plen = self.prefix_len();
    // Skip a separator directly after the prefix, if present.
    if repr.len() > plen && repr.as_bytes()[plen] == SEP_BYTE {
        repr[plen+1..]
    } else { repr[plen..] }
}
None if repr.as_bytes()[0] == SEP_BYTE => repr[1..],
None => repr
};
// Coerce `Some` to a fn pointer so the map adaptor's type is nameable.
let some: fn(&'a str) -> Option<&'a str> = Some; // coerce to fn ptr
let ret = s.split_terminator(SEP).map(some);
ret
}
// None result means the string didn't need normalizing
fn normalize_helper<'a>(s: &'a str, prefix: Option<PathPrefix>) -> (bool, Option<Vec<&'a str>>) {
    // Annotate as a fn pointer so both branches unify to one type.
    let f: fn(char) -> bool = if !prefix_is_verbatim(prefix) {
        is_sep
    } else {
        is_sep_verbatim
    };
    let is_abs = s.len() > prefix_len(prefix) && f(s.char_at(prefix_len(prefix)));
    let s_ = s[prefix_len(prefix)..];
    let s_ = if is_abs { s_[1..] } else { s_ };
    if is_abs && s_.is_empty() {
        return (is_abs, match prefix {
#[inline]
fn words(&self) -> Words {
fn is_not_empty(s: &&str) -> bool { !s.is_empty() }
+ let is_not_empty: fn(&&str) -> bool = is_not_empty; // coerce to fn pointer
+
fn is_whitespace(c: char) -> bool { c.is_whitespace() }
+ let is_whitespace: fn(char) -> bool = is_whitespace; // coerce to fn pointer
- self.split(is_whitespace).filter(is_not_empty)
+ Words { inner: self.split(is_whitespace).filter(is_not_empty) }
}
#[inline]