.unwrap()
.to_string();
- script_str.push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[])[]);
+ script_str.push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[..])[..]);
script_str.push_str("type summary add --no-value ");
script_str.push_str("--python-function lldb_rust_formatters.print_val ");
script_str.push_str("-x \".*\" --category Rust\n");
```{rust}
let vec: Vec<i32> = vec![1, 2, 3];
let arr: [i32; 3] = [1, 2, 3];
-let s: &[i32] = &vec[];
+let s: &[i32] = &vec[..];
```
As you can see, the `vec!` macro allows you to create a `Vec<T>` easily. The
fn main() {
let v = vec!["match_this", "1"];
- match &v[] {
+ match &v[..] {
["match_this", second] => println!("The second element is {}", second),
_ => {},
}
#[unstable(feature = "collections", reason = "trait is unstable")]
impl<T> BorrowFrom<Vec<T>> for [T] {
- fn borrow_from(owned: &Vec<T>) -> &[T] { &owned[] }
+ fn borrow_from(owned: &Vec<T>) -> &[T] { &owned[..] }
}
#[unstable(feature = "collections", reason = "trait is unstable")]
impl<T> BorrowFromMut<Vec<T>> for [T] {
- fn borrow_from_mut(owned: &mut Vec<T>) -> &mut [T] { &mut owned[] }
+ fn borrow_from_mut(owned: &mut Vec<T>) -> &mut [T] { &mut owned[..] }
}
#[unstable(feature = "collections", reason = "trait is unstable")]
#[test]
fn test_slice_from() {
let vec: &[_] = &[1, 2, 3, 4];
- assert_eq!(&vec[], vec);
+ assert_eq!(&vec[..], vec);
let b: &[_] = &[3, 4];
assert_eq!(&vec[2..], b);
let b: &[_] = &[];
#[test]
fn test_lexicographic_permutations_empty_and_short() {
- let empty : &mut[i32] = &mut[];
+ let empty : &mut[i32] = &mut[];
assert!(empty.next_permutation() == false);
- let b: &mut[i32] = &mut[];
+ let b: &mut[i32] = &mut[];
assert!(empty == b);
assert!(empty.prev_permutation() == false);
assert!(empty == b);
#[test]
fn test_total_ord() {
let c = &[1, 2, 3];
- [1, 2, 3, 4][].cmp(c) == Greater;
+ [1, 2, 3, 4][..].cmp(c) == Greater;
let c = &[1, 2, 3, 4];
- [1, 2, 3][].cmp(c) == Less;
+ [1, 2, 3][..].cmp(c) == Less;
let c = &[1, 2, 3, 6];
- [1, 2, 3, 4][].cmp(c) == Equal;
+ [1, 2, 3, 4][..].cmp(c) == Equal;
let c = &[1, 2, 3, 4, 5, 6];
- [1, 2, 3, 4, 5, 5, 5, 5][].cmp(c) == Less;
+ [1, 2, 3, 4, 5, 5, 5, 5][..].cmp(c) == Less;
let c = &[1, 2, 3, 4];
- [2, 2][].cmp(c) == Greater;
+ [2, 2][..].cmp(c) == Greater;
}
#[test]
#[unstable(feature = "collections", reason = "trait is unstable")]
impl BorrowFrom<String> for str {
- fn borrow_from(owned: &String) -> &str { &owned[] }
+ fn borrow_from(owned: &String) -> &str { &owned[..] }
}
#[unstable(feature = "collections", reason = "trait is unstable")]
reason = "this functionality may be moved to libunicode")]
fn nfd_chars(&self) -> Decompositions {
Decompositions {
- iter: self[].chars(),
+ iter: self[..].chars(),
buffer: Vec::new(),
sorted: false,
kind: Canonical
reason = "this functionality may be moved to libunicode")]
fn nfkd_chars(&self) -> Decompositions {
Decompositions {
- iter: self[].chars(),
+ iter: self[..].chars(),
buffer: Vec::new(),
sorted: false,
kind: Compatible
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn contains(&self, pat: &str) -> bool {
- core_str::StrExt::contains(&self[], pat)
+ core_str::StrExt::contains(&self[..], pat)
}
/// Returns true if a string contains a char pattern.
#[unstable(feature = "collections",
reason = "might get removed in favour of a more generic contains()")]
fn contains_char<P: CharEq>(&self, pat: P) -> bool {
- core_str::StrExt::contains_char(&self[], pat)
+ core_str::StrExt::contains_char(&self[..], pat)
}
/// An iterator over the characters of `self`. Note, this iterates
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn chars(&self) -> Chars {
- core_str::StrExt::chars(&self[])
+ core_str::StrExt::chars(&self[..])
}
/// An iterator over the bytes of `self`
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn bytes(&self) -> Bytes {
- core_str::StrExt::bytes(&self[])
+ core_str::StrExt::bytes(&self[..])
}
/// An iterator over the characters of `self` and their byte offsets.
#[stable(feature = "rust1", since = "1.0.0")]
fn char_indices(&self) -> CharIndices {
- core_str::StrExt::char_indices(&self[])
+ core_str::StrExt::char_indices(&self[..])
}
/// An iterator over substrings of `self`, separated by characters
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn split<P: CharEq>(&self, pat: P) -> Split<P> {
- core_str::StrExt::split(&self[], pat)
+ core_str::StrExt::split(&self[..], pat)
}
/// An iterator over substrings of `self`, separated by characters
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn splitn<P: CharEq>(&self, count: usize, pat: P) -> SplitN<P> {
- core_str::StrExt::splitn(&self[], count, pat)
+ core_str::StrExt::splitn(&self[..], count, pat)
}
/// An iterator over substrings of `self`, separated by characters
/// ```
#[unstable(feature = "collections", reason = "might get removed")]
fn split_terminator<P: CharEq>(&self, pat: P) -> SplitTerminator<P> {
- core_str::StrExt::split_terminator(&self[], pat)
+ core_str::StrExt::split_terminator(&self[..], pat)
}
/// An iterator over substrings of `self`, separated by characters
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn rsplitn<P: CharEq>(&self, count: usize, pat: P) -> RSplitN<P> {
- core_str::StrExt::rsplitn(&self[], count, pat)
+ core_str::StrExt::rsplitn(&self[..], count, pat)
}
/// An iterator over the start and end indices of the disjoint
#[unstable(feature = "collections",
reason = "might have its iterator type changed")]
fn match_indices<'a>(&'a self, pat: &'a str) -> MatchIndices<'a> {
- core_str::StrExt::match_indices(&self[], pat)
+ core_str::StrExt::match_indices(&self[..], pat)
}
/// An iterator over the substrings of `self` separated by the pattern `sep`.
#[unstable(feature = "collections",
reason = "might get removed in the future in favor of a more generic split()")]
fn split_str<'a>(&'a self, pat: &'a str) -> SplitStr<'a> {
- core_str::StrExt::split_str(&self[], pat)
+ core_str::StrExt::split_str(&self[..], pat)
}
/// An iterator over the lines of a string (subsequences separated
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn lines(&self) -> Lines {
- core_str::StrExt::lines(&self[])
+ core_str::StrExt::lines(&self[..])
}
/// An iterator over the lines of a string, separated by either
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn lines_any(&self) -> LinesAny {
- core_str::StrExt::lines_any(&self[])
+ core_str::StrExt::lines_any(&self[..])
}
/// Deprecated: use `s[a .. b]` instead.
#[unstable(feature = "collections",
reason = "may have yet to prove its worth")]
fn slice_chars(&self, begin: usize, end: usize) -> &str {
- core_str::StrExt::slice_chars(&self[], begin, end)
+ core_str::StrExt::slice_chars(&self[..], begin, end)
}
/// Takes a bytewise (not UTF-8) slice from a string.
/// the entire slice as well.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn slice_unchecked(&self, begin: usize, end: usize) -> &str {
- core_str::StrExt::slice_unchecked(&self[], begin, end)
+ core_str::StrExt::slice_unchecked(&self[..], begin, end)
}
/// Returns true if the pattern `pat` is a prefix of the string.
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn starts_with(&self, pat: &str) -> bool {
- core_str::StrExt::starts_with(&self[], pat)
+ core_str::StrExt::starts_with(&self[..], pat)
}
/// Returns true if the pattern `pat` is a suffix of the string.
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn ends_with(&self, pat: &str) -> bool {
- core_str::StrExt::ends_with(&self[], pat)
+ core_str::StrExt::ends_with(&self[..], pat)
}
/// Returns a string with all pre- and suffixes that match
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn trim_matches<P: CharEq>(&self, pat: P) -> &str {
- core_str::StrExt::trim_matches(&self[], pat)
+ core_str::StrExt::trim_matches(&self[..], pat)
}
/// Returns a string with all prefixes that match
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn trim_left_matches<P: CharEq>(&self, pat: P) -> &str {
- core_str::StrExt::trim_left_matches(&self[], pat)
+ core_str::StrExt::trim_left_matches(&self[..], pat)
}
/// Returns a string with all suffixes that match
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn trim_right_matches<P: CharEq>(&self, pat: P) -> &str {
- core_str::StrExt::trim_right_matches(&self[], pat)
+ core_str::StrExt::trim_right_matches(&self[..], pat)
}
/// Check that `index`-th byte lies at the start and/or end of a
#[unstable(feature = "collections",
reason = "naming is uncertain with container conventions")]
fn is_char_boundary(&self, index: usize) -> bool {
- core_str::StrExt::is_char_boundary(&self[], index)
+ core_str::StrExt::is_char_boundary(&self[..], index)
}
/// Pluck a character out of a string and return the index of the next
#[unstable(feature = "collections",
reason = "naming is uncertain with container conventions")]
fn char_range_at(&self, start: usize) -> CharRange {
- core_str::StrExt::char_range_at(&self[], start)
+ core_str::StrExt::char_range_at(&self[..], start)
}
/// Given a byte position and a str, return the previous char and its position.
#[unstable(feature = "collections",
reason = "naming is uncertain with container conventions")]
fn char_range_at_reverse(&self, start: usize) -> CharRange {
- core_str::StrExt::char_range_at_reverse(&self[], start)
+ core_str::StrExt::char_range_at_reverse(&self[..], start)
}
/// Plucks the character starting at the `i`th byte of a string.
#[unstable(feature = "collections",
reason = "naming is uncertain with container conventions")]
fn char_at(&self, i: usize) -> char {
- core_str::StrExt::char_at(&self[], i)
+ core_str::StrExt::char_at(&self[..], i)
}
/// Plucks the character ending at the `i`th byte of a string.
#[unstable(feature = "collections",
reason = "naming is uncertain with container conventions")]
fn char_at_reverse(&self, i: usize) -> char {
- core_str::StrExt::char_at_reverse(&self[], i)
+ core_str::StrExt::char_at_reverse(&self[..], i)
}
/// Work with the byte buffer of a string as a byte slice.
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn as_bytes(&self) -> &[u8] {
- core_str::StrExt::as_bytes(&self[])
+ core_str::StrExt::as_bytes(&self[..])
}
/// Returns the byte index of the first character of `self` that
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn find<P: CharEq>(&self, pat: P) -> Option<usize> {
- core_str::StrExt::find(&self[], pat)
+ core_str::StrExt::find(&self[..], pat)
}
/// Returns the byte index of the last character of `self` that
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn rfind<P: CharEq>(&self, pat: P) -> Option<usize> {
- core_str::StrExt::rfind(&self[], pat)
+ core_str::StrExt::rfind(&self[..], pat)
}
/// Returns the byte index of the first matching substring
#[unstable(feature = "collections",
reason = "might get removed in favor of a more generic find in the future")]
fn find_str(&self, needle: &str) -> Option<usize> {
- core_str::StrExt::find_str(&self[], needle)
+ core_str::StrExt::find_str(&self[..], needle)
}
/// Retrieves the first character from a string slice and returns
#[unstable(feature = "collections",
reason = "awaiting conventions about shifting and slices")]
fn slice_shift_char(&self) -> Option<(char, &str)> {
- core_str::StrExt::slice_shift_char(&self[])
+ core_str::StrExt::slice_shift_char(&self[..])
}
/// Returns the byte offset of an inner slice relative to an enclosing outer slice.
#[unstable(feature = "collections",
reason = "awaiting convention about comparability of arbitrary slices")]
fn subslice_offset(&self, inner: &str) -> usize {
- core_str::StrExt::subslice_offset(&self[], inner)
+ core_str::StrExt::subslice_offset(&self[..], inner)
}
/// Return an unsafe pointer to the strings buffer.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
fn as_ptr(&self) -> *const u8 {
- core_str::StrExt::as_ptr(&self[])
+ core_str::StrExt::as_ptr(&self[..])
}
/// Return an iterator of `u16` over the string encoded as UTF-16.
#[unstable(feature = "collections",
reason = "this functionality may only be provided by libunicode")]
fn utf16_units(&self) -> Utf16Units {
- Utf16Units { encoder: Utf16Encoder::new(self[].chars()) }
+ Utf16Units { encoder: Utf16Encoder::new(self[..].chars()) }
}
/// Return the number of bytes in this string
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
fn len(&self) -> usize {
- core_str::StrExt::len(&self[])
+ core_str::StrExt::len(&self[..])
}
/// Returns true if this slice contains no bytes
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn is_empty(&self) -> bool {
- core_str::StrExt::is_empty(&self[])
+ core_str::StrExt::is_empty(&self[..])
}
/// Parse this string into the specified type.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn parse<F: FromStr>(&self) -> Result<F, F::Err> {
- core_str::StrExt::parse(&self[])
+ core_str::StrExt::parse(&self[..])
}
/// Returns an iterator over the
#[unstable(feature = "collections",
reason = "this functionality may only be provided by libunicode")]
fn graphemes(&self, is_extended: bool) -> Graphemes {
- UnicodeStr::graphemes(&self[], is_extended)
+ UnicodeStr::graphemes(&self[..], is_extended)
}
/// Returns an iterator over the grapheme clusters of self and their byte offsets.
#[unstable(feature = "collections",
reason = "this functionality may only be provided by libunicode")]
fn grapheme_indices(&self, is_extended: bool) -> GraphemeIndices {
- UnicodeStr::grapheme_indices(&self[], is_extended)
+ UnicodeStr::grapheme_indices(&self[..], is_extended)
}
/// An iterator over the words of a string (subsequences separated
#[unstable(feature = "str_words",
reason = "the precise algorithm to use is unclear")]
fn words(&self) -> Words {
- UnicodeStr::words(&self[])
+ UnicodeStr::words(&self[..])
}
/// Returns a string's displayed width in columns, treating control
#[unstable(feature = "collections",
reason = "this functionality may only be provided by libunicode")]
fn width(&self, is_cjk: bool) -> usize {
- UnicodeStr::width(&self[], is_cjk)
+ UnicodeStr::width(&self[..], is_cjk)
}
/// Returns a string with leading and trailing whitespace removed.
#[stable(feature = "rust1", since = "1.0.0")]
fn trim(&self) -> &str {
- UnicodeStr::trim(&self[])
+ UnicodeStr::trim(&self[..])
}
/// Returns a string with leading whitespace removed.
#[stable(feature = "rust1", since = "1.0.0")]
fn trim_left(&self) -> &str {
- UnicodeStr::trim_left(&self[])
+ UnicodeStr::trim_left(&self[..])
}
/// Returns a string with trailing whitespace removed.
#[stable(feature = "rust1", since = "1.0.0")]
fn trim_right(&self) -> &str {
- UnicodeStr::trim_right(&self[])
+ UnicodeStr::trim_right(&self[..])
}
}
&["\u{378}\u{308}\u{903}"], &["\u{378}\u{308}", "\u{903}"]),
];
- for &(s, g) in &test_same[] {
+ for &(s, g) in &test_same[..] {
// test forward iterator
assert!(order::equals(s.graphemes(true), g.iter().cloned()));
assert!(order::equals(s.graphemes(false), g.iter().cloned()));
type Output = str;
#[inline]
fn index(&self, index: &ops::Range<usize>) -> &str {
- &self[][*index]
+ &self[..][*index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
type Output = str;
#[inline]
fn index(&self, index: &ops::RangeTo<usize>) -> &str {
- &self[][*index]
+ &self[..][*index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
type Output = str;
#[inline]
fn index(&self, index: &ops::RangeFrom<usize>) -> &str {
- &self[][*index]
+ &self[..][*index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
fn deref(&self) -> &str {
- unsafe { mem::transmute(&self.vec[]) }
+ unsafe { mem::transmute(&self.vec[..]) }
}
}
#[test]
fn test_slicing() {
let s = "foobar".to_string();
- assert_eq!("foobar", &s[]);
+ assert_eq!("foobar", &s[..]);
assert_eq!("foo", &s[..3]);
assert_eq!("bar", &s[3..]);
assert_eq!("oob", &s[1..4]);
b.bytes = src_len as u64;
b.iter(|| {
- let dst = src.clone()[].to_vec();
+ let dst = src.clone()[..].to_vec();
assert_eq!(dst.len(), src_len);
assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
});
impl<S: hash::Writer + Hasher, T: Hash<S>> Hash<S> for [T; $N] {
fn hash(&self, state: &mut S) {
- Hash::hash(&self[], state)
+ Hash::hash(&self[..], state)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for [T; $N] {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Debug::fmt(&&self[], f)
+ fmt::Debug::fmt(&&self[..], f)
}
}
impl<A, B> PartialEq<[B; $N]> for [A; $N] where A: PartialEq<B> {
#[inline]
fn eq(&self, other: &[B; $N]) -> bool {
- &self[] == &other[]
+ &self[..] == &other[..]
}
#[inline]
fn ne(&self, other: &[B; $N]) -> bool {
- &self[] != &other[]
+ &self[..] != &other[..]
}
}
{
#[inline(always)]
fn eq(&self, other: &Rhs) -> bool {
- PartialEq::eq(&self[], &**other)
+ PartialEq::eq(&self[..], &**other)
}
#[inline(always)]
fn ne(&self, other: &Rhs) -> bool {
- PartialEq::ne(&self[], &**other)
+ PartialEq::ne(&self[..], &**other)
}
}
{
#[inline(always)]
fn eq(&self, other: &[B; $N]) -> bool {
- PartialEq::eq(&**self, &other[])
+ PartialEq::eq(&**self, &other[..])
}
#[inline(always)]
fn ne(&self, other: &[B; $N]) -> bool {
- PartialEq::ne(&**self, &other[])
+ PartialEq::ne(&**self, &other[..])
}
}
impl<T:PartialOrd> PartialOrd for [T; $N] {
#[inline]
fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
- PartialOrd::partial_cmp(&&self[], &&other[])
+ PartialOrd::partial_cmp(&&self[..], &&other[..])
}
#[inline]
fn lt(&self, other: &[T; $N]) -> bool {
- PartialOrd::lt(&&self[], &&other[])
+ PartialOrd::lt(&&self[..], &&other[..])
}
#[inline]
fn le(&self, other: &[T; $N]) -> bool {
- PartialOrd::le(&&self[], &&other[])
+ PartialOrd::le(&&self[..], &&other[..])
}
#[inline]
fn ge(&self, other: &[T; $N]) -> bool {
- PartialOrd::ge(&&self[], &&other[])
+ PartialOrd::ge(&&self[..], &&other[..])
}
#[inline]
fn gt(&self, other: &[T; $N]) -> bool {
- PartialOrd::gt(&&self[], &&other[])
+ PartialOrd::gt(&&self[..], &&other[..])
}
}
impl<T:Ord> Ord for [T; $N] {
#[inline]
fn cmp(&self, other: &[T; $N]) -> Ordering {
- Ord::cmp(&&self[], &&other[])
+ Ord::cmp(&&self[..], &&other[..])
}
}
)+
/// Any string that can be represented as a slice
#[unstable(feature = "core",
reason = "Instead of taking this bound generically, this trait will be \
- replaced with one of slicing syntax (&foo[]), deref coercions, or \
+ replaced with one of slicing syntax (&foo[..]), deref coercions, or \
a more generic conversion trait")]
pub trait Str {
/// Work with `self` as a slice.
.collect::<Vec<uint>>();
assert_eq!(n, xs.len());
- assert_eq!(&xs[], &ys[]);
+ assert_eq!(&xs[..], &ys[..]);
}
#[test]
{
let mut iter = data.iter();
- assert_eq!(&iter[], &other_data[]);
+ assert_eq!(&iter[..], &other_data[..]);
iter.next();
- assert_eq!(&iter[], &other_data[1..]);
+ assert_eq!(&iter[..], &other_data[1..]);
iter.next_back();
- assert_eq!(&iter[], &other_data[1..2]);
+ assert_eq!(&iter[..], &other_data[1..2]);
let s = iter.as_slice();
iter.next();
}
{
let mut iter = data.iter_mut();
- assert_eq!(&iter[], &other_data[]);
+ assert_eq!(&iter[..], &other_data[..]);
// mutability:
assert!(&mut iter[] == other_data);
iter.next();
- assert_eq!(&iter[], &other_data[1..]);
+ assert_eq!(&iter[..], &other_data[1..]);
assert!(&mut iter[] == &mut other_data[1..]);
iter.next_back();
- assert_eq!(&iter[], &other_data[1..2]);
+ assert_eq!(&iter[..], &other_data[1..2]);
assert!(&mut iter[] == &mut other_data[1..2]);
let s = iter.into_slice();
}
Some((_, other)) => {
self.err(&format!("expected `{:?}`, found `{:?}`", c,
- other)[]);
+ other));
}
None => {
self.err(&format!("expected `{:?}` but string was terminated",
- c)[]);
+ c));
}
}
}
impl Matches {
fn opt_vals(&self, nm: &str) -> Vec<Optval> {
- match find_opt(&self.opts[], Name::from_str(nm)) {
+ match find_opt(&self.opts[..], Name::from_str(nm)) {
Some(id) => self.vals[id].clone(),
None => panic!("No option '{}' defined", nm)
}
/// Returns the string argument supplied to one of several matching options or `None`.
pub fn opts_str(&self, names: &[String]) -> Option<String> {
for nm in names {
- match self.opt_val(&nm[]) {
+ match self.opt_val(&nm[..]) {
Some(Val(ref s)) => return Some(s.clone()),
_ => ()
}
while i < l {
let cur = args[i].clone();
let curlen = cur.len();
- if !is_arg(&cur[]) {
+ if !is_arg(&cur[..]) {
free.push(cur);
} else if cur == "--" {
let mut j = i + 1;
v.push(Val((i_arg.clone())
.unwrap()));
} else if name_pos < names.len() || i + 1 == l ||
- is_arg(&args[i + 1][]) {
+ is_arg(&args[i + 1][..]) {
let v = &mut vals[optid];
v.push(Given);
} else {
0 => {}
1 => {
row.push('-');
- row.push_str(&short_name[]);
+ row.push_str(&short_name[..]);
row.push(' ');
}
_ => panic!("the short name should only be 1 ascii char long"),
0 => {}
_ => {
row.push_str("--");
- row.push_str(&long_name[]);
+ row.push_str(&long_name[..]);
row.push(' ');
}
}
// arg
match hasarg {
No => {}
- Yes => row.push_str(&hint[]),
+ Yes => row.push_str(&hint[..]),
Maybe => {
row.push('[');
- row.push_str(&hint[]);
+ row.push_str(&hint[..]);
row.push(']');
}
}
row.push(' ');
}
} else {
- row.push_str(&desc_sep[]);
+ row.push_str(&desc_sep[..]);
}
// Normalize desc to contain words separated by one space character
// FIXME: #5516 should be graphemes not codepoints
let mut desc_rows = Vec::new();
- each_split_within(&desc_normalized_whitespace[], 54, |substr| {
+ each_split_within(&desc_normalized_whitespace[..], 54, |substr| {
desc_rows.push(substr.to_string());
true
});
// FIXME: #5516 should be graphemes not codepoints
// wrapped description
- row.push_str(&desc_rows.connect(&desc_sep[])[]);
+ row.push_str(&desc_rows.connect(&desc_sep[..])[..]);
row
});
// Use short_name is possible, but fallback to long_name.
if opt.short_name.len() > 0 {
line.push('-');
- line.push_str(&opt.short_name[]);
+ line.push_str(&opt.short_name[..]);
} else {
line.push_str("--");
- line.push_str(&opt.long_name[]);
+ line.push_str(&opt.long_name[..]);
}
if opt.hasarg != No {
if opt.hasarg == Maybe {
line.push('[');
}
- line.push_str(&opt.hint[]);
+ line.push_str(&opt.hint[..]);
if opt.hasarg == Maybe {
line.push(']');
}
line.push_str(&opts.iter()
.map(format_option)
.collect::<Vec<String>>()
- .connect(" ")[]);
+ .connect(" ")[..]);
line
}
pub fn escape(&self) -> String {
match self {
&LabelStr(ref s) => s.escape_default(),
- &EscStr(ref s) => LabelText::escape_str(&s[]),
+ &EscStr(ref s) => LabelText::escape_str(&s[..]),
}
}
let mut prefix = self.pre_escaped_content().into_owned();
let suffix = suffix.pre_escaped_content();
prefix.push_str(r"\n\n");
- prefix.push_str(&suffix[]);
+ prefix.push_str(&suffix[..]);
EscStr(prefix.into_cow())
}
}
impl<'a> Labeller<'a, Node, &'a Edge> for LabelledGraph {
fn graph_id(&'a self) -> Id<'a> {
- Id::new(&self.name[]).unwrap()
+ Id::new(&self.name[..]).unwrap()
}
fn node_id(&'a self, n: &Node) -> Id<'a> {
id_name(n)
// Test the literal string from args against the current filter, if there
// is one.
match unsafe { FILTER.as_ref() } {
- Some(filter) if !args.to_string().contains(&filter[]) => return,
+ Some(filter) if !args.to_string().contains(&filter[..]) => return,
_ => {}
}
// Search for the longest match, the vector is assumed to be pre-sorted.
for directive in iter.rev() {
match directive.name {
- Some(ref name) if !module.starts_with(&name[]) => {},
+ Some(ref name) if !module.starts_with(&name[..]) => {},
Some(..) | None => {
return level <= directive.level
}
/// `Once` primitive (and this function is called from that primitive).
fn init() {
let (mut directives, filter) = match env::var("RUST_LOG") {
- Ok(spec) => directive::parse_logging_spec(&spec[]),
+ Ok(spec) => directive::parse_logging_spec(&spec[..]),
Err(..) => (Vec::new(), None),
};
if n_uniq > 0 {
let s = ty_to_string(cx.tcx, ty);
let m = format!("type uses owned (Box type) pointers: {}", s);
- cx.span_lint(BOX_POINTERS, span, &m[]);
+ cx.span_lint(BOX_POINTERS, span, &m[..]);
}
}
}
}
} else {
let attrs = csearch::get_item_attrs(&cx.sess().cstore, did);
- warned |= check_must_use(cx, &attrs[], s.span);
+ warned |= check_must_use(cx, &attrs[..], s.span);
}
}
_ => {}
} else {
format!("{} `{}` should have a camel case name such as `{}`", sort, s, c)
};
- cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[]);
+ cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]);
}
}
}
if !is_snake_case(ident) {
let sc = NonSnakeCase::to_snake_case(&s);
- if sc != &s[] {
+ if sc != &s[..] {
cx.span_lint(NON_SNAKE_CASE, span,
&*format!("{} `{}` should have a snake case name such as `{}`",
sort, s, sc));
if s.chars().any(|c| c.is_lowercase()) {
let uc: String = NonSnakeCase::to_snake_case(&s).chars()
.map(|c| c.to_uppercase()).collect();
- if uc != &s[] {
+ if uc != &s[..] {
cx.span_lint(NON_UPPER_CASE_GLOBALS, span,
&format!("{} `{}` should have an upper case name such as `{}`",
sort, s, uc));
let m = format!("braces around {} is unnecessary",
&token::get_ident(*name));
cx.span_lint(UNUSED_IMPORT_BRACES, item.span,
- &m[]);
+ &m[..]);
},
_ => ()
}
let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| {
attr.check_name("doc") && match attr.meta_item_list() {
None => false,
- Some(l) => attr::contains_name(&l[], "hidden"),
+ Some(l) => attr::contains_name(&l[..], "hidden"),
}
});
self.doc_hidden_stack.push(doc_hidden);
_ => format!("use of {} item", label)
};
- cx.span_lint(lint, span, &msg[]);
+ cx.span_lint(lint, span, &msg[..]);
}
}
}
match (sess, from_plugin) {
// We load builtin lints first, so a duplicate is a compiler bug.
// Use early_error when handling -W help with no crate.
- (None, _) => early_error(&msg[]),
- (Some(sess), false) => sess.bug(&msg[]),
+ (None, _) => early_error(&msg[..]),
+ (Some(sess), false) => sess.bug(&msg[..]),
// A duplicate name from a plugin is a user error.
- (Some(sess), true) => sess.err(&msg[]),
+ (Some(sess), true) => sess.err(&msg[..]),
}
}
match (sess, from_plugin) {
// We load builtin lints first, so a duplicate is a compiler bug.
// Use early_error when handling -W help with no crate.
- (None, _) => early_error(&msg[]),
- (Some(sess), false) => sess.bug(&msg[]),
+ (None, _) => early_error(&msg[..]),
+ (Some(sess), false) => sess.bug(&msg[..]),
// A duplicate name from a plugin is a user error.
- (Some(sess), true) => sess.err(&msg[]),
+ (Some(sess), true) => sess.err(&msg[..]),
}
}
}
let warning = format!("lint {} has been renamed to {}",
lint_name, new_name);
match span {
- Some(span) => sess.span_warn(span, &warning[]),
- None => sess.warn(&warning[]),
+ Some(span) => sess.span_warn(span, &warning[..]),
+ None => sess.warn(&warning[..]),
};
Some(lint_id)
}
pub fn process_command_line(&mut self, sess: &Session) {
for &(ref lint_name, level) in &sess.opts.lint_opts {
- match self.find_lint(&lint_name[], sess, None) {
+ match self.find_lint(&lint_name[..], sess, None) {
Some(lint_id) => self.set_level(lint_id, (level, CommandLine)),
None => {
match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone()))
.collect::<FnvHashMap<&'static str,
Vec<LintId>>>()
- .get(&lint_name[]) {
+ .get(&lint_name[..]) {
Some(v) => {
v.iter()
.map(|lint_id: &LintId|
if level == Forbid { level = Deny; }
match (level, span) {
- (Warn, Some(sp)) => sess.span_warn(sp, &msg[]),
- (Warn, None) => sess.warn(&msg[]),
- (Deny, Some(sp)) => sess.span_err(sp, &msg[]),
- (Deny, None) => sess.err(&msg[]),
+ (Warn, Some(sp)) => sess.span_warn(sp, &msg[..]),
+ (Warn, None) => sess.warn(&msg[..]),
+ (Deny, Some(sp)) => sess.span_err(sp, &msg[..]),
+ (Deny, None) => sess.err(&msg[..]),
_ => sess.bug("impossible level in raw_emit_lint"),
}
if let Some(note) = note {
- sess.note(&note[]);
+ sess.note(&note[..]);
}
if let Some(span) = def {
match self.lints.find_lint(&lint_name, &self.tcx.sess, Some(span)) {
Some(lint_id) => vec![(lint_id, level, span)],
None => {
- match self.lints.lint_groups.get(&lint_name[]) {
+ match self.lints.lint_groups.get(&lint_name[..]) {
Some(&(ref v, _)) => v.iter()
.map(|lint_id: &LintId|
(*lint_id, level, span))
None => {}
Some(lints) => {
for (lint_id, span, msg) in lints {
- self.span_lint(lint_id.lint, span, &msg[])
+ self.span_lint(lint_id.lint, span, &msg[..])
}
}
}
let name = match *path_opt {
Some((ref path_str, _)) => {
let name = path_str.to_string();
- validate_crate_name(Some(self.sess), &name[],
+ validate_crate_name(Some(self.sess), &name[..],
Some(i.span));
name
}
let source = self.sess.cstore.get_used_crate_source(cnum).unwrap();
if let Some(locs) = self.sess.opts.externs.get(name) {
let found = locs.iter().any(|l| {
- let l = fs::realpath(&Path::new(&l[])).ok();
+ let l = fs::realpath(&Path::new(&l[..])).ok();
source.dylib.as_ref().map(|p| &p.0) == l.as_ref() ||
source.rlib.as_ref().map(|p| &p.0) == l.as_ref()
});
let mut load_ctxt = loader::Context {
sess: self.sess,
span: span,
- ident: &ident[],
- crate_name: &name[],
+ ident: &ident[..],
+ crate_name: &name[..],
hash: None,
filesearch: self.sess.host_filesearch(PathKind::Crate),
target: &self.sess.host,
name,
config::host_triple(),
self.sess.opts.target_triple);
- self.sess.span_err(span, &message[]);
+ self.sess.span_err(span, &message[..]);
self.sess.abort_if_errors();
}
let message = format!("plugin `{}` only found in rlib format, \
but must be available in dylib format",
name);
- self.sess.span_err(span, &message[]);
+ self.sess.span_err(span, &message[..]);
// No need to abort because the loading code will just ignore this
// empty dylib.
None
rbml_w.end_tag();
rbml_w.start_tag(tag_mod_child);
- rbml_w.wr_str(&s[]);
+ rbml_w.wr_str(&s[..]);
rbml_w.end_tag();
}
let fields = ty::lookup_struct_fields(ecx.tcx, def_id);
let idx = encode_info_for_struct(ecx,
rbml_w,
- &fields[],
+ &fields[..],
index);
- encode_struct_fields(rbml_w, &fields[], def_id);
+ encode_struct_fields(rbml_w, &fields[..], def_id);
encode_index(rbml_w, idx, write_i64);
}
}
class itself */
let idx = encode_info_for_struct(ecx,
rbml_w,
- &fields[],
+ &fields[..],
index);
/* Index the class*/
/* Encode def_ids for each field and method
for methods, write all the stuff get_trait_method
needs to know*/
- encode_struct_fields(rbml_w, &fields[], def_id);
+ encode_struct_fields(rbml_w, &fields[..], def_id);
encode_inlined_item(ecx, rbml_w, IIItemRef(item));
&Some(ref r) => format!("{} which `{}` depends on",
message, r.ident)
};
- self.sess.span_err(self.span, &message[]);
+ self.sess.span_err(self.span, &message[..]);
if self.rejected_via_triple.len() > 0 {
let mismatches = self.rejected_via_triple.iter();
None => return FileDoesntMatch,
Some(file) => file,
};
- let (hash, rlib) = if file.starts_with(&rlib_prefix[]) &&
+ let (hash, rlib) = if file.starts_with(&rlib_prefix[..]) &&
file.ends_with(".rlib") {
(&file[(rlib_prefix.len()) .. (file.len() - ".rlib".len())],
true)
(&file[(dylib_prefix.len()) .. (file.len() - dypair.1.len())],
false)
} else {
- if file.starts_with(&staticlib_prefix[]) &&
+ if file.starts_with(&staticlib_prefix[..]) &&
file.ends_with(".a") {
staticlibs.push(CrateMismatch {
path: path.clone(),
let mut rlibs = HashMap::new();
let mut dylibs = HashMap::new();
{
- let locs = locs.iter().map(|l| Path::new(&l[])).filter(|loc| {
+ let locs = locs.iter().map(|l| Path::new(&l[..])).filter(|loc| {
if !loc.exists() {
sess.err(&format!("extern location for {} does not exist: {}",
self.crate_name, loc.display())[]);
return true
} else {
let (ref prefix, ref suffix) = dylibname;
- if file.starts_with(&prefix[]) &&
- file.ends_with(&suffix[]) {
+ if file.starts_with(&prefix[..]) &&
+ file.ends_with(&suffix[..]) {
return true
}
}
assert_eq!(next(st), '[');
scan(st, |c| c == ']', |bytes| {
let abi_str = str::from_utf8(bytes).unwrap();
- abi::lookup(&abi_str[]).expect(abi_str)
+ abi::lookup(&abi_str[..]).expect(abi_str)
})
}
// Do an Option dance to use the path after it is moved below.
let s = ast_map::path_to_string(path.iter().cloned());
path_as_str = Some(s);
- path_as_str.as_ref().map(|x| &x[])
+ path_as_str.as_ref().map(|x| &x[..])
});
let mut ast_dsr = reader::Decoder::new(ast_doc);
let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
let s = replace_newline_with_backslash_l(s);
label.push_str(&format!("exiting scope_{} {}",
i,
- &s[])[]);
+ &s[..])[]);
}
dot::LabelText::EscStr(label.into_cow())
}
}
// Fourth, check for unreachable arms.
- check_arms(cx, &inlined_arms[], source);
+ check_arms(cx, &inlined_arms[..], source);
// Finally, check if the whole match expression is exhaustive.
// Check for empty enum, because is_useful only works on inhabited types.
for pat in pats {
let v = vec![&**pat];
- match is_useful(cx, &seen, &v[], LeaveOutWitness) {
+ match is_useful(cx, &seen, &v[..], LeaveOutWitness) {
NotUseful => {
match source {
ast::MatchSource::IfLetDesugar { .. } => {
fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix, source: ast::MatchSource) {
match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) {
UsefulWithWitness(pats) => {
- let witness = match &pats[] {
+ let witness = match &pats[..] {
[ref witness] => &**witness,
[] => DUMMY_WILD_PAT,
_ => unreachable!()
ast::MatchSource::ForLoopDesugar => {
// `witness` has the form `Some(<head>)`, peel off the `Some`
let witness = match witness.node {
- ast::PatEnum(_, Some(ref pats)) => match &pats[] {
+ ast::PatEnum(_, Some(ref pats)) => match &pats[..] {
[ref pat] => &**pat,
_ => unreachable!(),
},
UsefulWithWitness(pats) => UsefulWithWitness({
let arity = constructor_arity(cx, &c, left_ty);
let mut result = {
- let pat_slice = &pats[];
+ let pat_slice = &pats[..];
let subpats: Vec<_> = (0..arity).map(|i| {
pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p)
}).collect();
witness: WitnessPreference) -> Usefulness {
let arity = constructor_arity(cx, &ctor, lty);
let matrix = Matrix(m.iter().filter_map(|r| {
- specialize(cx, &r[], &ctor, 0, arity)
+ specialize(cx, &r[..], &ctor, 0, arity)
}).collect());
match specialize(cx, v, &ctor, 0, arity) {
- Some(v) => is_useful(cx, &matrix, &v[], witness),
+ Some(v) => is_useful(cx, &matrix, &v[..], witness),
None => NotUseful
}
}
None => None,
Some(ast_map::NodeItem(it)) => match it.node {
ast::ItemEnum(ast::EnumDef { ref variants }, _) => {
- variant_expr(&variants[], variant_def.node)
+ variant_expr(&variants[..], variant_def.node)
}
_ => None
},
// NOTE this doesn't do the right thing, it compares inlined
// NodeId's to the original variant_def's NodeId, but they
// come from different crates, so they will likely never match.
- variant_expr(&variants[], variant_def.node).map(|e| e.id)
+ variant_expr(&variants[..], variant_def.node).map(|e| e.id)
}
_ => None
},
pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val {
match eval_const_expr_partial(tcx, e, None) {
Ok(r) => r,
- Err(s) => tcx.sess.span_fatal(e.span, &s[])
+ Err(s) => tcx.sess.span_fatal(e.span, &s[..])
}
}
let a = match eval_const_expr_partial(tcx, a, ty_hint) {
Ok(a) => a,
Err(s) => {
- tcx.sess.span_err(a.span, &s[]);
+ tcx.sess.span_err(a.span, &s[..]);
return None;
}
};
let b = match eval_const_expr_partial(tcx, b, ty_hint) {
Ok(b) => b,
Err(s) => {
- tcx.sess.span_err(b.span, &s[]);
+ tcx.sess.span_err(b.span, &s[..]);
return None;
}
};
let mut t = on_entry.to_vec();
self.apply_gen_kill(cfgidx, &mut t);
temp_bits = t;
- &temp_bits[]
+ &temp_bits[..]
}
};
debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}",
let bits = &mut self.kills[start.. end];
debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]",
self.analysis_name, flow_exit, mut_bits_to_string(bits));
- bits.clone_from_slice(&orig_kills[]);
+ bits.clone_from_slice(&orig_kills[..]);
debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]",
self.analysis_name, flow_exit, mut_bits_to_string(bits));
}
for attr in lint::gather_attrs(attrs) {
match attr {
Ok((ref name, lint::Allow, _))
- if &name[] == dead_code => return true,
+ if &name[..] == dead_code => return true,
_ => (),
}
}
let msg = format!("Pattern has unexpected def: {:?} and type {}",
def,
cmt_pat.ty.repr(tcx));
- tcx.sess.span_bug(pat.span, &msg[])
+ tcx.sess.span_bug(pat.span, &msg[..])
}
}
}
for _ in a_regions {
invariance.push(ty::Invariant);
}
- &invariance[]
+ &invariance[..]
}
};
ref trace_origins,
ref same_regions) => {
if !same_regions.is_empty() {
- self.report_processed_errors(&var_origins[],
- &trace_origins[],
- &same_regions[]);
+ self.report_processed_errors(&var_origins[..],
+ &trace_origins[..],
+ &same_regions[..]);
}
}
}
let parent = self.tcx.map.get_parent(scope_id);
let parent_node = self.tcx.map.find(parent);
let taken = lifetimes_in_scope(self.tcx, scope_id);
- let life_giver = LifeGiver::with_taken(&taken[]);
+ let life_giver = LifeGiver::with_taken(&taken[..]);
let node_inner = match parent_node {
Some(ref node) => match *node {
ast_map::NodeItem(ref item) => {
}
expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime,
&anon_nums, ®ion_names);
- inputs = self.rebuild_args_ty(&inputs[], lifetime,
+ inputs = self.rebuild_args_ty(&inputs[..], lifetime,
&anon_nums, ®ion_names);
output = self.rebuild_output(&output, lifetime, &anon_nums, ®ion_names);
ty_params = self.rebuild_ty_params(ty_params, lifetime,
opt_explicit_self, generics);
let msg = format!("consider using an explicit lifetime \
parameter as shown: {}", suggested_fn);
- self.tcx.sess.span_help(span, &msg[]);
+ self.tcx.sess.span_help(span, &msg[..]);
}
fn report_inference_failure(&self,
s.push_str(&num_to_string(self.counter.get())[]);
if !self.taken.contains(&s) {
lifetime = name_to_dummy_lifetime(
- token::str_to_ident(&s[]).name);
+ token::str_to_ident(&s[..]).name);
self.generated.borrow_mut().push(lifetime);
break;
}
self.expansion(&mut var_data);
self.contraction(&mut var_data);
let values =
- self.extract_values_and_collect_conflicts(&var_data[],
+ self.extract_values_and_collect_conflicts(&var_data[..],
errors);
self.collect_concrete_region_errors(&values, errors);
values
fn visit_item(&mut self, item: &ast::Item) {
match extract(&item.attrs) {
Some(value) => {
- let item_index = self.item_refs.get(&value[]).map(|x| *x);
+ let item_index = self.item_refs.get(&value[..]).map(|x| *x);
match item_index {
Some(item_index) => {
// Uninteresting cases: just propagate in rev exec order
ast::ExprVec(ref exprs) => {
- self.propagate_through_exprs(&exprs[], succ)
+ self.propagate_through_exprs(&exprs[..], succ)
}
ast::ExprRepeat(ref element, ref count) => {
} else {
succ
};
- let succ = self.propagate_through_exprs(&args[], succ);
+ let succ = self.propagate_through_exprs(&args[..], succ);
self.propagate_through_expr(&**f, succ)
}
} else {
succ
};
- self.propagate_through_exprs(&args[], succ)
+ self.propagate_through_exprs(&args[..], succ)
}
ast::ExprTup(ref exprs) => {
- self.propagate_through_exprs(&exprs[], succ)
+ self.propagate_through_exprs(&exprs[..], succ)
}
ast::ExprBinary(op, ref l, ref r) if ast_util::lazy_binop(op.node) => {
let all_bounds =
util::transitive_bounds(
- self.tcx(), &caller_trait_refs[]);
+ self.tcx(), &caller_trait_refs[..]);
let matching_bounds =
all_bounds.filter(
};
match result {
Ok(trait_did) => trait_did,
- Err(err) => cx.sess.fatal(&err[]),
+ Err(err) => cx.sess.fatal(&err[..]),
}
}
}
}
&ty_tup(ref ts) => {
- self.add_tys(&ts[]);
+ self.add_tys(&ts[..]);
}
&ty_bare_fn(_, ref f) => {
ty_struct(did, substs) => {
let flds = struct_fields(cx, did, substs);
let mut res =
- TypeContents::union(&flds[],
+ TypeContents::union(&flds[..],
|f| tc_mt(cx, f.mt, cache));
if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) {
}
ty_tup(ref tys) => {
- TypeContents::union(&tys[],
+ TypeContents::union(&tys[..],
|ty| tc_ty(cx, *ty, cache))
}
ty_enum(did, substs) => {
let variants = substd_enum_variants(cx, did, substs);
let mut res =
- TypeContents::union(&variants[], |variant| {
+ TypeContents::union(&variants[..], |variant| {
TypeContents::union(&variant.args[],
|arg_ty| {
tc_ty(cx, *arg_ty, cache)
match item.node {
ItemTrait(_, _, _, ref ms) => {
let (_, p) =
- ast_util::split_trait_methods(&ms[]);
+ ast_util::split_trait_methods(&ms[..]);
p.iter()
.map(|m| {
match impl_or_trait_item(
{
match tcx.freevars.borrow().get(&fid) {
None => f(&[]),
- Some(d) => f(&d[])
+ Some(d) => f(&d[..])
}
}
pub fn link_name(attrs: &[ast::Attribute]) -> Option<InternedString> {
lang_items::extract(attrs).and_then(|name| {
- $(if &name[] == stringify!($name) {
+ $(if &name[..] == stringify!($name) {
Some(InternedString::new(stringify!($sym)))
} else)* {
None
// inside this crate, so continue would spew "macro undefined"
// errors
Err(err) => {
- self.sess.span_fatal(span, &err[])
+ self.sess.span_fatal(span, &err[..])
}
};
unsafe {
let registrar =
- match lib.symbol(&symbol[]) {
+ match lib.symbol(&symbol[..]) {
Ok(registrar) => {
mem::transmute::<*mut u8,PluginRegistrarFun>(registrar)
}
// again fatal if we can't register macros
Err(err) => {
- self.sess.span_fatal(span, &err[])
+ self.sess.span_fatal(span, &err[..])
}
};
append_configuration(&mut user_cfg, InternedString::new("test"))
}
let mut v = user_cfg.into_iter().collect::<Vec<_>>();
- v.push_all(&default_cfg[]);
+ v.push_all(&default_cfg[..]);
v
}
pub fn build_session_options(matches: &getopts::Matches) -> Options {
let unparsed_crate_types = matches.opt_strs("crate-type");
let crate_types = parse_crate_types_from_list(unparsed_crate_types)
- .unwrap_or_else(|e| early_error(&e[]));
+ .unwrap_or_else(|e| early_error(&e[..]));
let mut lint_opts = vec!();
let mut describe_lints = false;
let mut search_paths = SearchPaths::new();
for s in &matches.opt_strs("L") {
- search_paths.add_path(&s[]);
+ search_paths.add_path(&s[..]);
}
let libs = matches.opt_strs("l").into_iter().map(|s| {
--debuginfo");
}
- let color = match matches.opt_str("color").as_ref().map(|s| &s[]) {
+ let color = match matches.opt_str("color").as_ref().map(|s| &s[..]) {
Some("auto") => Auto,
Some("always") => Always,
Some("never") => Never,
let sessopts = build_session_options(matches);
let sess = build_session(sessopts, None, registry);
let cfg = build_configuration(&sess);
- assert!((attr::contains_name(&cfg[], "test")));
+ assert!((attr::contains_name(&cfg[..], "test")));
}
// When the user supplies --test and --cfg test, don't implicitly add
}
pub fn span_err(&self, sp: Span, msg: &str) {
match split_msg_into_multilines(msg) {
- Some(msg) => self.diagnostic().span_err(sp, &msg[]),
+ Some(msg) => self.diagnostic().span_err(sp, &msg[..]),
None => self.diagnostic().span_err(sp, msg)
}
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
match split_msg_into_multilines(msg) {
- Some(msg) => self.diagnostic().span_err_with_code(sp, &msg[], code),
+ Some(msg) => self.diagnostic().span_err_with_code(sp, &msg[..], code),
None => self.diagnostic().span_err_with_code(sp, msg, code)
}
}
for c in (0u32..MAX as u32)
.filter_map(|i| from_u32(i))
.map(|i| i.to_string()) {
- assert_eq!(lev_distance(&c[], &c[]), 0);
+ assert_eq!(lev_distance(&c[..], &c[..]), 0);
}
let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
Some(def_id) => {
s.push_str(" {");
let path_str = ty::item_path_str(cx, def_id);
- s.push_str(&path_str[]);
+ s.push_str(&path_str[..]);
s.push_str("}");
}
None => { }
.iter()
.map(|elem| ty_to_string(cx, *elem))
.collect::<Vec<_>>();
- match &strs[] {
+ match &strs[..] {
[ref string] => format!("({},)", string),
strs => format!("({})", strs.connect(", "))
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
- repr_vec(tcx, &self[])
+ repr_vec(tcx, &self[..])
}
}
// autoderef cannot convert the &[T] handler
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
- repr_vec(tcx, &self[])
+ repr_vec(tcx, &self[..])
}
}
&base,
trait_ref.substs,
trait_ref.def_id,
- &projection_bounds[],
+ &projection_bounds[..],
|| ty::lookup_trait_def(tcx, trait_ref.def_id).generics.clone())
}
}
}
})
});
- let names: Vec<_> = names.iter().map(|s| &s[]).collect();
+ let names: Vec<_> = names.iter().map(|s| &s[..]).collect();
let value_str = unbound_value.user_string(tcx);
if names.len() == 0 {
args: &str, cwd: Option<&Path>,
paths: &[&Path]) -> ProcessOutput {
let ar = match *maybe_ar_prog {
- Some(ref ar) => &ar[],
+ Some(ref ar) => &ar[..],
None => "ar"
};
let mut cmd = Command::new(ar);
o
},
Err(e) => {
- handler.err(&format!("could not exec `{}`: {}", &ar[],
+ handler.err(&format!("could not exec `{}`: {}", &ar[..],
e)[]);
handler.abort_if_errors();
panic!("rustc::back::archive::run_ar() should not reach this point");
for path in search_paths {
debug!("looking for {} inside {:?}", name, path.display());
- let test = path.join(&oslibname[]);
+ let test = path.join(&oslibname[..]);
if test.exists() { return test }
if oslibname != unixlibname {
- let test = path.join(&unixlibname[]);
+ let test = path.join(&unixlibname[..]);
if test.exists() { return test }
}
}
// as simple comparison is not enough - there
// might be also an extra name suffix
let obj_start = format!("{}", name);
- let obj_start = &obj_start[];
+ let obj_start = &obj_start[..];
// Ignoring all bytecode files, no matter of
// name
let bc_ext = ".bytecode.deflate";
- self.add_archive(rlib, &name[], |fname: &str| {
+ self.add_archive(rlib, &name[..], |fname: &str| {
let skip_obj = lto && fname.starts_with(obj_start)
&& fname.ends_with(".o");
skip_obj || fname.ends_with(bc_ext) || fname == METADATA_FILENAME
// allow running `ar s file.a` to update symbols only.
if self.should_update_symbols {
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
- "s", Some(self.work_dir.path()), &args[]);
+ "s", Some(self.work_dir.path()), &args[..]);
}
return self.archive;
}
// Add the archive members seen so far, without updating the
// symbol table (`S`).
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
- "cruS", Some(self.work_dir.path()), &args[]);
+ "cruS", Some(self.work_dir.path()), &args[..]);
args.clear();
args.push(&abs_dst);
// necessary.
let flags = if self.should_update_symbols { "crus" } else { "cruS" };
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
- flags, Some(self.work_dir.path()), &args[]);
+ flags, Some(self.work_dir.path()), &args[..]);
self.archive
}
} else {
filename
};
- let new_filename = self.work_dir.path().join(&filename[]);
+ let new_filename = self.work_dir.path().join(&filename[..]);
try!(fs::rename(file, &new_filename));
self.members.push(Path::new(filename));
}
l.map(|p| p.clone())
}).collect::<Vec<_>>();
- let rpaths = get_rpaths(config, &libs[]);
- flags.push_all(&rpaths_to_flags(&rpaths[])[]);
+ let rpaths = get_rpaths(config, &libs[..]);
+ flags.push_all(&rpaths_to_flags(&rpaths[..])[]);
flags
}
}
}
- log_rpaths("relative", &rel_rpaths[]);
- log_rpaths("fallback", &fallback_rpaths[]);
+ log_rpaths("relative", &rel_rpaths[..]);
+ log_rpaths("fallback", &fallback_rpaths[..]);
let mut rpaths = rel_rpaths;
- rpaths.push_all(&fallback_rpaths[]);
+ rpaths.push_all(&fallback_rpaths[..]);
// Remove duplicates
- let rpaths = minimize_rpaths(&rpaths[]);
+ let rpaths = minimize_rpaths(&rpaths[..]);
return rpaths;
}
let mut set = HashSet::new();
let mut minimized = Vec::new();
for rpath in rpaths {
- if set.insert(&rpath[]) {
+ if set.insert(&rpath[..]) {
minimized.push(rpath.clone());
}
}
macro_rules! key {
($key_name:ident) => ( {
let name = (stringify!($key_name)).replace("_", "-");
- obj.find(&name[]).map(|o| o.as_string()
+ obj.find(&name[..]).map(|o| o.as_string()
.map(|s| base.options.$key_name = s.to_string()));
} );
($key_name:ident, bool) => ( {
let name = (stringify!($key_name)).replace("_", "-");
- obj.find(&name[])
+ obj.find(&name[..])
.map(|o| o.as_boolean()
.map(|s| base.options.$key_name = s));
} );
($key_name:ident, list) => ( {
let name = (stringify!($key_name)).replace("_", "-");
- obj.find(&name[]).map(|o| o.as_array()
+ obj.find(&name[..]).map(|o| o.as_array()
.map(|v| base.options.$key_name = v.iter()
.map(|a| a.as_string().unwrap().to_string()).collect()
)
&self.bccx.loan_path_to_string(move_path)[])
};
- self.bccx.span_err(span, &err_message[]);
+ self.bccx.span_err(span, &err_message[..]);
self.bccx.span_note(
loan_span,
&format!("borrow of `{}` occurs here",
// This represents the collection of all but one of the elements
// from an array at the path described by the move path index.
// Note that attached MovePathIndex should have mem_categorization
- // of InteriorElement (i.e. array dereference `&foo[]`).
+ // of InteriorElement (i.e. array dereference `&foo[..]`).
AllButOneFrom(MovePathIndex),
}
// First, filter out duplicates
moved.sort();
moved.dedup();
- debug!("fragments 1 moved: {:?}", path_lps(&moved[]));
+ debug!("fragments 1 moved: {:?}", path_lps(&moved[..]));
assigned.sort();
assigned.dedup();
- debug!("fragments 1 assigned: {:?}", path_lps(&assigned[]));
+ debug!("fragments 1 assigned: {:?}", path_lps(&assigned[..]));
// Second, build parents from the moved and assigned.
for m in &moved {
parents.sort();
parents.dedup();
- debug!("fragments 2 parents: {:?}", path_lps(&parents[]));
+ debug!("fragments 2 parents: {:?}", path_lps(&parents[..]));
// Third, filter the moved and assigned fragments down to just the non-parents
- moved.retain(|f| non_member(*f, &parents[]));
- debug!("fragments 3 moved: {:?}", path_lps(&moved[]));
+ moved.retain(|f| non_member(*f, &parents[..]));
+ debug!("fragments 3 moved: {:?}", path_lps(&moved[..]));
- assigned.retain(|f| non_member(*f, &parents[]));
- debug!("fragments 3 assigned: {:?}", path_lps(&assigned[]));
+ assigned.retain(|f| non_member(*f, &parents[..]));
+ debug!("fragments 3 assigned: {:?}", path_lps(&assigned[..]));
// Fourth, build the leftover from the moved, assigned, and parents.
for m in &moved {
unmoved.sort();
unmoved.dedup();
- debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[]));
+ debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[..]));
// Fifth, filter the leftover fragments down to its core.
unmoved.retain(|f| match *f {
AllButOneFrom(_) => true,
- Just(mpi) => non_member(mpi, &parents[]) &&
- non_member(mpi, &moved[]) &&
- non_member(mpi, &assigned[])
+ Just(mpi) => non_member(mpi, &parents[..]) &&
+ non_member(mpi, &moved[..]) &&
+ non_member(mpi, &assigned[..])
});
- debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[]));
+ debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[..]));
// Swap contents back in.
fragments.unmoved_fragments = unmoved;
let msg = format!("type {} ({:?}) is not fragmentable",
parent_ty.repr(tcx), sty_and_variant_info);
let opt_span = origin_id.and_then(|id|tcx.map.opt_span(id));
- tcx.sess.opt_span_bug(opt_span, &msg[])
+ tcx.sess.opt_span_bug(opt_span, &msg[..])
}
}
}
check_loans::check_loans(this,
&loan_dfcx,
flowed_moves,
- &all_loans[],
+ &all_loans[..],
id,
decl,
body);
set.push_str(", ");
}
let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp);
- set.push_str(&loan_str[]);
+ set.push_str(&loan_str[..]);
saw_some = true;
true
});
let expanded_crate
= match phase_2_configure_and_expand(&sess,
krate,
- &id[],
+ &id[..],
addl_plugins) {
None => return,
Some(k) => k
&sess,
outdir,
&expanded_crate,
- &id[]));
+ &id[..]));
let mut forest = ast_map::Forest::new(expanded_crate);
let arenas = ty::CtxtArenas::new();
let ast_map = assign_node_ids_and_map(&sess, &mut forest);
- write_out_deps(&sess, input, &outputs, &id[]);
+ write_out_deps(&sess, input, &outputs, &id[..]);
controller_entry_point!(after_write_deps,
CompileState::state_after_write_deps(input,
&sess,
outdir,
&ast_map,
- &id[]));
+ &id[..]));
let analysis = phase_3_run_analysis_passes(sess,
ast_map,
-> Compilation {
match matches.opt_str("explain") {
Some(ref code) => {
- match descriptions.find_description(&code[]) {
+ match descriptions.find_description(&code[..]) {
Some(ref description) => {
println!("{}", description);
}
for lint in lints {
let name = lint.name_lower().replace("_", "-");
println!(" {} {:7.7} {}",
- padded(&name[]), lint.default_level.as_str(), lint.desc);
+ padded(&name[..]), lint.default_level.as_str(), lint.desc);
}
println!("\n");
};
let desc = to.into_iter().map(|x| x.as_str().replace("_", "-"))
.collect::<Vec<String>>().connect(", ");
println!(" {} {}",
- padded(&name[]), desc);
+ padded(&name[..]), desc);
}
println!("\n");
};
}
let matches =
- match getopts::getopts(&args[], &config::optgroups()[]) {
+ match getopts::getopts(&args[..], &config::optgroups()[]) {
Ok(m) => m,
Err(f_stable_attempt) => {
// redo option parsing, including unstable options this time,
"run with `RUST_BACKTRACE=1` for a backtrace".to_string(),
];
for note in &xs {
- emitter.emit(None, ¬e[], None, diagnostic::Note)
+ emitter.emit(None, ¬e[..], None, diagnostic::Note)
}
match r.read_to_string() {
ItemViaNode(node_id) =>
NodesMatchingDirect(Some(node_id).into_iter()),
ItemViaPath(ref parts) =>
- NodesMatchingSuffix(map.nodes_matching_suffix(&parts[])),
+ NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])),
}
}
user_option,
self.reconstructed_input(),
is_wrong_because);
- sess.fatal(&message[])
+ sess.fatal(&message[..])
};
let mut saw_node = ast::DUMMY_NODE_ID;
let is_expanded = needs_expansion(&ppm);
let compute_ast_map = needs_ast_map(&ppm, &opt_uii);
let krate = if compute_ast_map {
- match driver::phase_2_configure_and_expand(&sess, krate, &id[], None) {
+ match driver::phase_2_configure_and_expand(&sess, krate, &id[..], None) {
None => return,
Some(k) => k
}
};
let src_name = driver::source_name(input);
- let src = sess.codemap().get_filemap(&src_name[])
+ let src = sess.codemap().get_filemap(&src_name[..])
.src.as_bytes().to_vec();
let mut rdr = MemReader::new(src);
// point to what was found, if there's an
// accessible span.
match ast_map.opt_span(nodeid) {
- Some(sp) => sess.span_fatal(sp, &message[]),
- None => sess.fatal(&message[])
+ Some(sp) => sess.span_fatal(sp, &message[..]),
+ None => sess.fatal(&message[..])
}
}
}
pub fn t_param(&self, space: subst::ParamSpace, index: u32) -> Ty<'tcx> {
let name = format!("T{}", index);
- ty::mk_param(self.infcx.tcx, space, index, token::intern(&name[]))
+ ty::mk_param(self.infcx.tcx, space, index, token::intern(&name[..]))
}
pub fn re_early_bound(&self,
match result {
None => true,
Some((span, msg, note)) => {
- self.tcx.sess.span_err(span, &msg[]);
+ self.tcx.sess.span_err(span, &msg[..]);
match note {
Some((span, msg)) => {
- self.tcx.sess.span_note(span, &msg[])
+ self.tcx.sess.span_note(span, &msg[..])
}
None => {},
}
UnnamedField(idx) => format!("field #{} of {} is private",
idx + 1, struct_desc),
};
- self.tcx.sess.span_err(span, &msg[]);
+ self.tcx.sess.span_err(span, &msg[..]);
}
// Given the ID of a method, checks to ensure it's in scope.
&import_directive.module_path[],
import_directive.subclass),
help);
- self.resolve_error(span, &msg[]);
+ self.resolve_error(span, &msg[..]);
}
Indeterminate => break, // Bail out. We'll come around next time.
Success(()) => () // Good. Continue.
.iter()
.map(|seg| seg.identifier.name)
.collect();
- self.names_to_string(&names[])
+ self.names_to_string(&names[..])
}
fn import_directive_subclass_to_string(&mut self,
let module_path = &import_directive.module_path;
debug!("(resolving import for module) resolving import `{}::...` in `{}`",
- self.names_to_string(&module_path[]),
+ self.names_to_string(&module_path[..]),
self.module_to_string(&*module_));
// First, resolve the module path for the directive, if necessary.
Some((self.graph_root.get_module(), LastMod(AllPublic)))
} else {
match self.resolve_module_path(module_.clone(),
- &module_path[],
+ &module_path[..],
DontUseLexicalScope,
import_directive.span,
ImportSearch) {
ValueNS => "value",
},
&token::get_name(name));
- span_err!(self.session, import_span, E0252, "{}", &msg[]);
+ span_err!(self.session, import_span, E0252, "{}", &msg[..]);
}
Some(_) | None => {}
}
if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) {
let msg = format!("`{}` is not directly importable",
token::get_name(name));
- span_err!(self.session, import_span, E0253, "{}", &msg[]);
+ span_err!(self.session, import_span, E0253, "{}", &msg[..]);
}
}
crate in this module \
(maybe you meant `use {0}::*`?)",
&token::get_name(name));
- span_err!(self.session, import_span, E0254, "{}", &msg[]);
+ span_err!(self.session, import_span, E0254, "{}", &msg[..]);
}
Some(_) | None => {}
}
let msg = format!("import `{}` conflicts with value \
in this module",
&token::get_name(name));
- span_err!(self.session, import_span, E0255, "{}", &msg[]);
+ span_err!(self.session, import_span, E0255, "{}", &msg[..]);
if let Some(span) = value.value_span {
self.session.span_note(span,
"conflicting value here");
let msg = format!("import `{}` conflicts with type in \
this module",
&token::get_name(name));
- span_err!(self.session, import_span, E0256, "{}", &msg[]);
+ span_err!(self.session, import_span, E0256, "{}", &msg[..]);
if let Some(span) = ty.type_span {
self.session.span_note(span,
"note conflicting type here")
let msg = format!("inherent implementations \
are only allowed on types \
defined in the current module");
- span_err!(self.session, span, E0257, "{}", &msg[]);
+ span_err!(self.session, span, E0257, "{}", &msg[..]);
self.session.span_note(import_span,
"import from other module here")
}
let msg = format!("import `{}` conflicts with existing \
submodule",
&token::get_name(name));
- span_err!(self.session, import_span, E0258, "{}", &msg[]);
+ span_err!(self.session, import_span, E0258, "{}", &msg[..]);
if let Some(span) = ty.type_span {
self.session.span_note(span,
"note conflicting module here")
let segment_name = token::get_name(name);
let module_name = self.module_to_string(&*search_module);
let mut span = span;
- let msg = if "???" == &module_name[] {
+ let msg = if "???" == &module_name[..] {
span.hi = span.lo + Pos::from_usize(segment_name.len());
match search_parent_externals(name,
match module_prefix_result {
Failed(None) => {
let mpath = self.names_to_string(module_path);
- let mpath = &mpath[];
+ let mpath = &mpath[..];
match mpath.rfind(':') {
Some(idx) => {
let msg = format!("Could not find `{}` in `{}`",
let mut containing_module;
let mut i;
let first_module_path_string = token::get_name(module_path[0]);
- if "self" == &first_module_path_string[] {
+ if "self" == &first_module_path_string[..] {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 1;
- } else if "super" == &first_module_path_string[] {
+ } else if "super" == &first_module_path_string[..] {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 0; // We'll handle `super` below.
// Now loop through all the `super`s we find.
while i < module_path.len() {
let string = token::get_name(module_path[i]);
- if "super" != &string[] {
+ if "super" != &string[..] {
break
}
debug!("(resolving module prefix) resolving `super` at {}",
} else {
let err = format!("unresolved import (maybe you meant `{}::*`?)",
sn);
- self.resolve_error((*imports)[index].span, &err[]);
+ self.resolve_error((*imports)[index].span, &err[..]);
}
}
generics,
implemented_traits,
&**self_type,
- &impl_items[]);
+ &impl_items[..]);
}
ItemTrait(_, ref generics, ref bounds, ref trait_items) => {
};
let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str);
- self.resolve_error(trait_reference.path.span, &msg[]);
+ self.resolve_error(trait_reference.path.span, &msg[..]);
}
Some(def) => {
match def {
None => {
let msg = format!("use of undeclared type name `{}`",
self.path_names_to_string(path));
- self.resolve_error(ty.span, &msg[]);
+ self.resolve_error(ty.span, &msg[..]);
}
}
}
def: {:?}", result);
let msg = format!("`{}` does not name a structure",
self.path_names_to_string(path));
- self.resolve_error(path.span, &msg[]);
+ self.resolve_error(path.span, &msg[..]);
}
}
}
let last_private;
let module = self.current_module.clone();
match self.resolve_module_path(module,
- &module_path[],
+ &module_path[..],
UseLexicalScope,
path.span,
PathSearch) {
let containing_module;
let last_private;
match self.resolve_module_path_from_root(root_module,
- &module_path[],
+ &module_path[..],
0,
path.span,
PathSearch,
Some((span, msg)) => (span, msg),
None => {
let msg = format!("Use of undeclared module `::{}`",
- self.names_to_string(&module_path[]));
+ self.names_to_string(&module_path[..]));
(path.span, msg)
}
};
}
} else {
match this.resolve_module_path(root,
- &name_path[],
+ &name_path[..],
UseLexicalScope,
span,
PathSearch) {
let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::<Vec<_>>();
// Look for a method in the current self type's impl module.
- match get_module(self, path.span, &name_path[]) {
+ match get_module(self, path.span, &name_path[..]) {
Some(module) => match module.children.borrow().get(&name) {
Some(binding) => {
let p_str = self.path_names_to_string(&path);
def: {:?}", result);
let msg = format!("`{}` does not name a structure",
self.path_names_to_string(path));
- self.resolve_error(path.span, &msg[]);
+ self.resolve_error(path.span, &msg[..]);
}
}
attrs: &[ast::Attribute],
input: &Input) -> String {
let validate = |s: String, span: Option<Span>| {
- creader::validate_crate_name(sess, &s[], span);
+ creader::validate_crate_name(sess, &s[..], span);
s
};
if let Some(sess) = sess {
if let Some(ref s) = sess.opts.crate_name {
if let Some((attr, ref name)) = attr_crate_name {
- if *s != &name[] {
+ if *s != &name[..] {
let msg = format!("--crate-name and #[crate_name] are \
required to match, but `{}` != `{}`",
s, name);
- sess.span_err(attr.span, &msg[]);
+ sess.span_err(attr.span, &msg[..]);
}
}
return validate(s.clone(), None);
symbol_hasher.input_str("-");
symbol_hasher.input_str(link_meta.crate_hash.as_str());
for meta in &*tcx.sess.crate_metadata.borrow() {
- symbol_hasher.input_str(&meta[]);
+ symbol_hasher.input_str(&meta[..]);
}
symbol_hasher.input_str("-");
symbol_hasher.input_str(&encoder::encoded_ty(tcx, t)[]);
if result.len() > 0 &&
result.as_bytes()[0] != '_' as u8 &&
! (result.as_bytes()[0] as char).is_xid_start() {
- return format!("_{}", &result[]);
+ return format!("_{}", &result[..]);
}
return result;
hash.push(EXTRA_CHARS.as_bytes()[extra2] as char);
hash.push(EXTRA_CHARS.as_bytes()[extra3] as char);
- exported_name(path, &hash[])
+ exported_name(path, &hash[..])
}
pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
t: Ty<'tcx>,
name: &str) -> String {
let s = ppaux::ty_to_string(ccx.tcx(), t);
- let path = [PathName(token::intern(&s[])),
+ let path = [PathName(token::intern(&s[..])),
gensym_name(name)];
let hash = get_symbol_hash(ccx, t);
- mangle(path.iter().cloned(), Some(&hash[]))
+ mangle(path.iter().cloned(), Some(&hash[..]))
}
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String {
for &(ref l, kind) in &*sess.cstore.get_used_libraries().borrow() {
match kind {
cstore::NativeStatic => {
- ab.add_native_library(&l[]).unwrap();
+ ab.add_native_library(&l[..]).unwrap();
}
cstore::NativeFramework | cstore::NativeUnknown => {}
}
e)[])
};
- let bc_data_deflated = match flate::deflate_bytes(&bc_data[]) {
+ let bc_data_deflated = match flate::deflate_bytes(&bc_data[..]) {
Some(compressed) => compressed,
None => sess.fatal(&format!("failed to compress bytecode from {}",
bc_filename.display())[])
try! { writer.write_all(RLIB_BYTECODE_OBJECT_MAGIC) };
try! { writer.write_le_u32(1) };
try! { writer.write_le_u64(bc_data_deflated_size) };
- try! { writer.write_all(&bc_data_deflated[]) };
+ try! { writer.write_all(&bc_data_deflated[..]) };
let number_of_bytes_written_so_far =
RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id
continue
}
};
- ab.add_rlib(&p, &name[], sess.lto()).unwrap();
+ ab.add_rlib(&p, &name[..], sess.lto()).unwrap();
let native_libs = csearch::get_native_libraries(&sess.cstore, cnum);
all_native_libs.extend(native_libs.into_iter());
// The invocations of cc share some flags across platforms
let pname = get_cc_prog(sess);
- let mut cmd = Command::new(&pname[]);
+ let mut cmd = Command::new(&pname[..]);
cmd.args(&sess.target.target.options.pre_link_args[]);
link_args(&mut cmd, sess, dylib, tmpdir.path(),
sess.note(&format!("{:?}", &cmd)[]);
let mut output = prog.error.clone();
output.push_all(&prog.output[]);
- sess.note(str::from_utf8(&output[]).unwrap());
+ sess.note(str::from_utf8(&output[..]).unwrap());
sess.abort_if_errors();
}
debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap());
let mut v = b"-Wl,-force_load,".to_vec();
v.push_all(morestack.as_vec());
- cmd.arg(&v[]);
+ cmd.arg(&v[..]);
} else {
cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]);
}
if sess.opts.cg.rpath {
let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec();
v.push_all(out_filename.filename().unwrap());
- cmd.arg(&v[]);
+ cmd.arg(&v[..]);
}
} else {
cmd.arg("-shared");
// with any #[link_args] attributes found inside the crate
let empty = Vec::new();
cmd.args(&sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]);
- cmd.args(&used_link_args[]);
+ cmd.args(&used_link_args[..]);
}
// # Native library linking
} else {
// -force_load is the OSX equivalent of --whole-archive, but it
// involves passing the full path to the library to link.
- let lib = archive::find_library(&l[],
+ let lib = archive::find_library(&l[..],
&sess.target.target.options.staticlib_prefix,
&sess.target.target.options.staticlib_suffix,
- &search_path[],
+ &search_path[..],
&sess.diagnostic().handler);
let mut v = b"-Wl,-force_load,".to_vec();
v.push_all(lib.as_vec());
- cmd.arg(&v[]);
+ cmd.arg(&v[..]);
}
}
if takes_hints {
cmd.arg(format!("-l{}", l));
}
cstore::NativeFramework => {
- cmd.arg("-framework").arg(&l[]);
+ cmd.arg("-framework").arg(&l[..]);
}
cstore::NativeStatic => unreachable!(),
}
let mut v = "-l".as_bytes().to_vec();
v.push_all(unlib(&sess.target, cratepath.filestem().unwrap()));
- cmd.arg(&v[]);
+ cmd.arg(&v[..]);
}
}
}
cstore::NativeFramework => {
cmd.arg("-framework");
- cmd.arg(&lib[]);
+ cmd.arg(&lib[..]);
}
cstore::NativeStatic => {
sess.bug("statics shouldn't be propagated");
bc_decoded.len() as libc::size_t) {
write::llvm_err(sess.diagnostic().handler(),
format!("failed to load bc of `{}`",
- &name[]));
+ &name[..]));
}
});
}
unsafe {
let cstr = llvm::LLVMRustGetLastError();
if cstr == ptr::null() {
- handler.fatal(&msg[]);
+ handler.fatal(&msg[..]);
} else {
let err = ffi::c_str_to_bytes(&cstr);
let err = String::from_utf8_lossy(err).to_string();
libc::free(cstr as *mut _);
handler.fatal(&format!("{}: {}",
- &msg[],
- &err[])[]);
+ &msg[..],
+ &err[..])[]);
}
}
}
Some(ref code) => {
handler.emit_with_code(None,
&diag.msg[],
- &code[],
+ &code[..],
diag.lvl);
},
None => {
fn create_target_machine(sess: &Session) -> TargetMachineRef {
let reloc_model_arg = match sess.opts.cg.relocation_model {
- Some(ref s) => &s[],
+ Some(ref s) => &s[..],
None => &sess.target.target.options.relocation_model[]
};
let reloc_model = match reloc_model_arg {
let fdata_sections = ffunction_sections;
let code_model_arg = match sess.opts.cg.code_model {
- Some(ref s) => &s[],
+ Some(ref s) => &s[..],
None => &sess.target.target.options.code_model[]
};
let msg = llvm::build_string(|s| llvm::LLVMWriteSMDiagnosticToString(diag, s))
.expect("non-UTF8 SMDiagnostic");
- report_inline_asm(cgcx, &msg[], cookie);
+ report_inline_asm(cgcx, &msg[..], cookie);
}
unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_void) {
};
let pname = get_cc_prog(sess);
- let mut cmd = Command::new(&pname[]);
+ let mut cmd = Command::new(&pname[..]);
cmd.args(&sess.target.target.options.pre_link_args[]);
cmd.arg("-nostdlib");
for i in 0..trans.modules.len() {
if modules_config.emit_obj {
let ext = format!("{}.o", i);
- remove(sess, &crate_output.with_extension(&ext[]));
+ remove(sess, &crate_output.with_extension(&ext[..]));
}
if modules_config.emit_bc && !keep_numbered_bitcode {
let ext = format!("{}.bc", i);
- remove(sess, &crate_output.with_extension(&ext[]));
+ remove(sess, &crate_output.with_extension(&ext[..]));
}
}
pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
let pname = get_cc_prog(sess);
- let mut cmd = Command::new(&pname[]);
+ let mut cmd = Command::new(&pname[..]);
cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject))
.arg(outputs.temp_path(config::OutputTypeAssembly));
sess.note(&format!("{:?}", &cmd)[]);
let mut note = prog.error.clone();
note.push_all(&prog.output[]);
- sess.note(str::from_utf8(&note[]).unwrap());
+ sess.note(str::from_utf8(&note[..]).unwrap());
sess.abort_if_errors();
}
},
};
self.fmt.sub_mod_ref_str(path.span,
*span,
- &qualname[],
+ &qualname[..],
self.cur_scope);
}
}
};
self.fmt.sub_mod_ref_str(path.span,
*span,
- &qualname[],
+ &qualname[..],
self.cur_scope);
}
}
let (ref span, ref qualname) = sub_paths[len-2];
self.fmt.sub_type_ref_str(path.span,
*span,
- &qualname[]);
+ &qualname[..]);
// write the other sub-paths
if len <= 2 {
for &(ref span, ref qualname) in sub_paths {
self.fmt.sub_mod_ref_str(path.span,
*span,
- &qualname[],
+ &qualname[..],
self.cur_scope);
}
}
id,
qualname,
&path_to_string(p)[],
- &typ[]);
+ &typ[..]);
}
self.collected_paths.clear();
}
};
let qualname = format!("{}::{}", qualname, &get_ident(method.pe_ident()));
- let qualname = &qualname[];
+ let qualname = &qualname[..];
// record the decl for this def (if it has one)
let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx,
Some(sub_span) => self.fmt.field_str(field.span,
Some(sub_span),
field.node.id,
- &name[],
- &qualname[],
- &typ[],
+ &name[..],
+ &qualname[..],
+ &typ[..],
scope_id),
None => self.sess.span_bug(field.span,
&format!("Could not find sub-span for field {}",
self.fmt.typedef_str(full_span,
Some(*param_ss),
param.id,
- &name[],
+ &name[..],
"");
}
self.visit_generics(generics);
self.fmt.fn_str(item.span,
sub_span,
item.id,
- &qualname[],
+ &qualname[..],
self.cur_scope);
- self.process_formals(&decl.inputs, &qualname[]);
+ self.process_formals(&decl.inputs, &qualname[..]);
// walk arg and return types
for arg in &decl.inputs {
// walk the body
self.nest(item.id, |v| v.visit_block(&*body));
- self.process_generic_params(ty_params, item.span, &qualname[], item.id);
+ self.process_generic_params(ty_params, item.span, &qualname[..], item.id);
}
fn process_static(&mut self,
sub_span,
item.id,
&get_ident(item.ident),
- &qualname[],
- &value[],
+ &qualname[..],
+ &value[..],
&ty_to_string(&*typ)[],
self.cur_scope);
sub_span,
item.id,
&get_ident(item.ident),
- &qualname[],
+ &qualname[..],
"",
&ty_to_string(&*typ)[],
self.cur_scope);
sub_span,
item.id,
ctor_id,
- &qualname[],
+ &qualname[..],
self.cur_scope,
- &val[]);
+ &val[..]);
// fields
for field in &def.fields {
- self.process_struct_field_def(field, &qualname[], item.id);
+ self.process_struct_field_def(field, &qualname[..], item.id);
self.visit_ty(&*field.node.ty);
}
- self.process_generic_params(ty_params, item.span, &qualname[], item.id);
+ self.process_generic_params(ty_params, item.span, &qualname[..], item.id);
}
fn process_enum(&mut self,
Some(sub_span) => self.fmt.enum_str(item.span,
Some(sub_span),
item.id,
- &enum_name[],
+ &enum_name[..],
self.cur_scope,
- &val[]),
+ &val[..]),
None => self.sess.span_bug(item.span,
&format!("Could not find subspan for enum {}",
enum_name)[]),
self.span.span_for_first_ident(variant.span),
variant.node.id,
name,
- &qualname[],
- &enum_name[],
- &val[],
+ &qualname[..],
+ &enum_name[..],
+ &val[..],
item.id);
for arg in args {
self.visit_ty(&*arg.ty);
self.span.span_for_first_ident(variant.span),
variant.node.id,
ctor_id,
- &qualname[],
- &enum_name[],
- &val[],
+ &qualname[..],
+ &enum_name[..],
+ &val[..],
item.id);
for field in &struct_def.fields {
}
}
- self.process_generic_params(ty_params, item.span, &enum_name[], item.id);
+ self.process_generic_params(ty_params, item.span, &enum_name[..], item.id);
}
fn process_impl(&mut self,
self.fmt.trait_str(item.span,
sub_span,
item.id,
- &qualname[],
+ &qualname[..],
self.cur_scope,
- &val[]);
+ &val[..]);
// super-traits
for super_bound in &**trait_refs {
}
// walk generics and methods
- self.process_generic_params(generics, item.span, &qualname[], item.id);
+ self.process_generic_params(generics, item.span, &qualname[..], item.id);
for method in methods {
self.visit_trait_item(method)
}
self.fmt.mod_str(item.span,
sub_span,
item.id,
- &qualname[],
+ &qualname[..],
self.cur_scope,
- &filename[]);
+ &filename[..]);
self.nest(item.id, |v| visit::walk_mod(v, m));
}
self.cur_scope);
// walk receiver and args
- visit::walk_exprs(self, &args[]);
+ visit::walk_exprs(self, &args[..]);
}
fn process_pat(&mut self, p:&ast::Pat) {
item.id,
cnum,
name,
- &location[],
+ &location[..],
self.cur_scope);
}
ast::ItemFn(ref decl, _, _, ref ty_params, ref body) =>
self.fmt.typedef_str(item.span,
sub_span,
item.id,
- &qualname[],
- &value[]);
+ &qualname[..],
+ &value[..]);
self.visit_ty(&**ty);
self.process_generic_params(ty_params, item.span, &qualname, item.id);
};
qualname.push_str(&get_ident(method_type.ident));
- let qualname = &qualname[];
+ let qualname = &qualname[..];
let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn);
self.fmt.method_decl_str(method_type.span,
let mut id = String::from_str("$");
id.push_str(&ex.id.to_string()[]);
- self.process_formals(&decl.inputs, &id[]);
+ self.process_formals(&decl.inputs, &id[..]);
// walk arg and return types
for arg in &decl.inputs {
Some(p.span),
id,
&path_to_string(p)[],
- &value[],
+ &value[..],
"")
}
def::DefVariant(..) | def::DefTy(..) | def::DefStruct(..) => {
sub_span,
id,
&path_to_string(p)[],
- &value[],
- &typ[]);
+ &value[..],
+ &typ[..]);
}
self.collected_paths.clear();
cur_scope: 0
};
- visitor.dump_crate_info(&cratename[], krate);
+ visitor.dump_crate_info(&cratename[..], krate);
visit::walk_crate(&mut visitor, krate);
}
assert!(self.dump_spans);
let result = format!("span,kind,{},{},text,\"{}\"\n",
kind, su.extent_str(span), escape(su.snippet(span)));
- self.record(&result[]);
+ self.record(&result[..]);
}
}
if s.len() > 1020 {
&s[..1020]
} else {
- &s[]
+ &s[..]
}
});
let pairs = fields.iter().zip(values);
let strs = pairs.map(|(f, v)| format!(",{},\"{}\"", f, escape(String::from_str(v))));
Some(strs.fold(String::new(), |mut s, ss| {
- s.push_str(&ss[]);
+ s.push_str(&ss[..]);
s
}))
}
};
let mut result = String::from_str(label);
- result.push_str(&values_str[]);
+ result.push_str(&values_str[..]);
result.push_str("\n");
- self.recorder.record(&result[]);
+ self.recorder.record(&result[..]);
}
pub fn record_with_span(&mut self,
None => return,
};
let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str);
- self.recorder.record(&result[]);
+ self.recorder.record(&result[..]);
}
pub fn check_and_record(&mut self,
param_env: param_env,
};
enter_match(bcx, dm, m, col, val, |pats|
- check_match::specialize(&mcx, &pats[], &ctor, col, variant_size)
+ check_match::specialize(&mcx, &pats[..], &ctor, col, variant_size)
)
}
if has_nested_bindings(m, col) {
let expanded = expand_nested_bindings(bcx, m, col, val);
compile_submatch_continue(bcx,
- &expanded[],
+ &expanded[..],
vals,
chk,
col,
}
let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val);
let mut opt_vals = unpacked;
- opt_vals.push_all(&vals_left[]);
+ opt_vals.push_all(&vals_left[..]);
compile_submatch(opt_cx,
- &opt_ms[],
- &opt_vals[],
+ &opt_ms[..],
+ &opt_vals[..],
branch_chk.as_ref().unwrap_or(chk),
has_genuine_default);
}
}
_ => {
compile_submatch(else_cx,
- &defaults[],
- &vals_left[],
+ &defaults[..],
+ &vals_left[..],
chk,
has_genuine_default);
}
&& arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle)
});
- compile_submatch(bcx, &matches[], &[discr_datum.val], &chk, has_default);
+ compile_submatch(bcx, &matches[..], &[discr_datum.val], &chk, has_default);
let mut arm_cxs = Vec::new();
for arm_data in &arm_datas {
arm_cxs.push(bcx);
}
- bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[]);
+ bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[..]);
return bcx;
}
t: Ty<'tcx>) -> Repr<'tcx> {
match t.sty {
ty::ty_tup(ref elems) => {
- Univariant(mk_struct(cx, &elems[], false, t), false)
+ Univariant(mk_struct(cx, &elems[..], false, t), false)
}
ty::ty_struct(def_id, substs) => {
let fields = ty::lookup_struct_fields(cx.tcx(), def_id);
let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
if dtor { ftys.push(cx.tcx().types.bool); }
- Univariant(mk_struct(cx, &ftys[], packed, t), dtor)
+ Univariant(mk_struct(cx, &ftys[..], packed, t), dtor)
}
ty::ty_closure(def_id, _, substs) => {
let typer = NormalizingClosureTyper::new(cx.tcx());
let upvars = typer.closure_upvars(def_id, substs).unwrap();
let upvar_types = upvars.iter().map(|u| u.ty).collect::<Vec<_>>();
- Univariant(mk_struct(cx, &upvar_types[], false, t), false)
+ Univariant(mk_struct(cx, &upvar_types[..], false, t), false)
}
ty::ty_enum(def_id, substs) => {
let cases = get_cases(cx.tcx(), def_id, substs);
// (Typechecking will reject discriminant-sizing attrs.)
assert_eq!(hint, attr::ReprAny);
let ftys = if dtor { vec!(cx.tcx().types.bool) } else { vec!() };
- return Univariant(mk_struct(cx, &ftys[], false, t),
+ return Univariant(mk_struct(cx, &ftys[..], false, t),
dtor);
}
assert_eq!(hint, attr::ReprAny);
let mut ftys = cases[0].tys.clone();
if dtor { ftys.push(cx.tcx().types.bool); }
- return Univariant(mk_struct(cx, &ftys[], false, t),
+ return Univariant(mk_struct(cx, &ftys[..], false, t),
dtor);
}
let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity));
ftys.push_all(&c.tys[]);
if dtor { ftys.push(cx.tcx().types.bool); }
- mk_struct(cx, &ftys[], false, t)
+ mk_struct(cx, &ftys[..], false, t)
}).collect();
- ensure_enum_fits_in_address_space(cx, &fields[], t);
+ ensure_enum_fits_in_address_space(cx, &fields[..], t);
General(ity, fields, dtor)
}
.map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
};
- ensure_struct_fits_in_address_space(cx, &lltys[], packed, scapegoat);
+ ensure_struct_fits_in_address_space(cx, &lltys[..], packed, scapegoat);
- let llty_rec = Type::struct_(cx, &lltys[], packed);
+ let llty_rec = Type::struct_(cx, &lltys[..], packed);
Struct {
size: machine::llsize_of_alloc(cx, llty_rec),
align: machine::llalign_of_min(cx, llty_rec),
// of the size.
//
// FIXME #10604: this breaks when vector types are present.
- let (size, align) = union_size_and_align(&sts[]);
+ let (size, align) = union_size_and_align(&sts[..]);
let align_s = align as u64;
assert_eq!(size % align_s, 0);
let align_units = size / align_s - 1;
Type::array(&discr_ty, align_s / discr_size - 1),
fill_ty];
match name {
- None => Type::struct_(cx, &fields[], false),
+ None => Type::struct_(cx, &fields[..], false),
Some(name) => {
let mut llty = Type::named_struct(cx, name);
- llty.set_struct_body(&fields[], false);
+ llty.set_struct_body(&fields[..], false);
llty
}
}
fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField,
scrutinee: ValueRef) -> ValueRef {
- let llptrptr = GEPi(bcx, scrutinee, &discrfield[]);
+ let llptrptr = GEPi(bcx, scrutinee, &discrfield[..]);
let llptr = Load(bcx, llptrptr);
let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
ICmp(bcx, cmp, llptr, C_null(val_ty(llptr)), DebugLoc::None)
}
StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
if discr != nndiscr {
- let llptrptr = GEPi(bcx, val, &discrfield[]);
+ let llptrptr = GEPi(bcx, val, &discrfield[..]);
let llptrty = val_ty(llptrptr).element_type();
Store(bcx, C_null(llptrty), llptrptr)
}
let val = if needs_cast {
let ccx = bcx.ccx();
let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::<Vec<_>>();
- let real_ty = Type::struct_(ccx, &fields[], st.packed);
+ let real_ty = Type::struct_(ccx, &fields[..], st.packed);
PointerCast(bcx, val, real_ty.ptr_to())
} else {
val
let fields = case.fields.iter().map(|&ty|
type_of::type_of(bcx.ccx(), ty)).collect::<Vec<_>>();
- let real_ty = Type::struct_(ccx, &fields[], case.packed);
+ let real_ty = Type::struct_(ccx, &fields[..], case.packed);
let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to());
variant_cx = f(variant_cx, case, variant_value);
}
General(ity, ref cases, _) => {
let case = &cases[discr as uint];
- let (max_sz, _) = union_size_and_align(&cases[]);
+ let (max_sz, _) = union_size_and_align(&cases[..]);
let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true);
let mut f = vec![lldiscr];
f.push_all(vals);
- let mut contents = build_const_struct(ccx, case, &f[]);
+ let mut contents = build_const_struct(ccx, case, &f[..]);
contents.push_all(&[padding(ccx, max_sz - case.size)]);
- C_struct(ccx, &contents[], false)
+ C_struct(ccx, &contents[..], false)
}
Univariant(ref st, _dro) => {
assert!(discr == 0);
let contents = build_const_struct(ccx, st, vals);
- C_struct(ccx, &contents[], st.packed)
+ C_struct(ccx, &contents[..], st.packed)
}
RawNullablePointer { nndiscr, nnty, .. } => {
if discr == nndiscr {
}).collect::<Vec<ValueRef>>();
C_struct(ccx, &build_const_struct(ccx,
nonnull,
- &vals[])[],
+ &vals[..])[],
false)
}
}
callee::DontAutorefArg)
})
}).collect::<Vec<_>>();
- inputs.push_all(&ext_inputs[]);
+ inputs.push_all(&ext_inputs[..]);
// no failure occurred preparing operands, no need to cleanup
fcx.pop_custom_cleanup_scope(temp_scope);
if !clobbers.is_empty() {
clobbers.push(',');
}
- clobbers.push_str(&more_clobbers[]);
+ clobbers.push_str(&more_clobbers[..]);
}
// Add the clobbers to our constraints list
if clobbers.len() != 0 && constraints.len() != 0 {
constraints.push(',');
- constraints.push_str(&clobbers[]);
+ constraints.push_str(&clobbers[..]);
} else {
- constraints.push_str(&clobbers[]);
+ constraints.push_str(&clobbers[..]);
}
- debug!("Asm Constraints: {}", &constraints[]);
+ debug!("Asm Constraints: {}", &constraints[..]);
let num_outputs = outputs.len();
} else if num_outputs == 1 {
output_types[0]
} else {
- Type::struct_(bcx.ccx(), &output_types[], false)
+ Type::struct_(bcx.ccx(), &output_types[..], false)
};
let dialect = match ia.dialect {
let f = decl_rust_fn(ccx, fn_ty, name);
let attrs = csearch::get_item_attrs(&ccx.sess().cstore, did);
- set_llvm_fn_attrs(ccx, &attrs[], f);
+ set_llvm_fn_attrs(ccx, &attrs[..], f);
ccx.externs().borrow_mut().insert(name.to_string(), f);
f
ty::mk_nil(ccx.tcx()));
get_extern_fn(ccx,
&mut *ccx.externs().borrow_mut(),
- &name[],
+ &name[..],
llvm::CCallConv,
llty,
dtor_ty)
ty::ty_bare_fn(_, ref fn_ty) => {
match ccx.sess().target.target.adjust_abi(fn_ty.abi) {
Rust | RustCall => {
- get_extern_rust_fn(ccx, t, &name[], did)
+ get_extern_rust_fn(ccx, t, &name[..], did)
}
RustIntrinsic => {
ccx.sess().bug("unexpected intrinsic in trans_external_path")
}
_ => {
foreign::register_foreign_item_fn(ccx, fn_ty.abi, t,
- &name[])
+ &name[..])
}
}
}
let llresult = Invoke(bcx,
llfn,
- &llargs[],
+ &llargs[..],
normal_bcx.llbb,
landing_pad,
Some(attributes),
let llresult = Call(bcx,
llfn,
- &llargs[],
+ &llargs[..],
Some(attributes),
debug_loc);
return (llresult, bcx);
"argtuple",
arg_scope_id));
let untupled_arg_types = match monomorphized_arg_types[0].sty {
- ty::ty_tup(ref types) => &types[],
+ ty::ty_tup(ref types) => &types[..],
_ => {
bcx.tcx().sess.span_bug(args[0].pat.span,
"first arg to `rust-call` ABI function \
let arg_datums = if abi != RustCall {
create_datums_for_fn_args(&fcx,
- &monomorphized_arg_types[])
+ &monomorphized_arg_types[..])
} else {
create_datums_for_fn_args_under_call_abi(
bcx,
arg_scope,
- &monomorphized_arg_types[])
+ &monomorphized_arg_types[..])
};
bcx = match closure_env {
arg_scope,
&decl.inputs[],
arg_datums,
- &monomorphized_arg_types[])
+ &monomorphized_arg_types[..])
}
};
bcx = expr::trans_adt(bcx,
result_ty,
disr,
- &fields[],
+ &fields[..],
None,
expr::SaveIn(llresult),
debug_loc);
ty::erase_late_bound_regions(
ccx.tcx(), &ty::ty_fn_args(ctor_ty));
- let arg_datums = create_datums_for_fn_args(&fcx, &arg_tys[]);
+ let arg_datums = create_datums_for_fn_args(&fcx, &arg_tys[..]);
if !type_is_zero_size(fcx.ccx, result_ty.unwrap()) {
let dest = fcx.get_ret_slot(bcx, result_ty, "eret_slot");
ast::ItemImpl(_, _, ref generics, _, _, ref impl_items) => {
meth::trans_impl(ccx,
item.ident,
- &impl_items[],
+ &impl_items[..],
generics,
item.id);
}
_ => panic!("expected bare rust fn")
};
- let llfn = decl_rust_fn(ccx, node_type, &sym[]);
+ let llfn = decl_rust_fn(ccx, node_type, &sym[..]);
finish_register_fn(ccx, sp, sym, node_id, llfn);
llfn
}
match fn_sig.inputs[1].sty {
ty::ty_tup(ref t_in) => {
- inputs.push_all(&t_in[]);
+ inputs.push_all(&t_in[..]);
inputs
}
_ => ccx.sess().bug("expected tuple'd inputs")
debug!("register_fn_llvmty id={} sym={}", node_id, sym);
let llfn = decl_fn(ccx,
- &sym[],
+ &sym[..],
cc,
llfty,
ty::FnConverging(ty::mk_nil(ccx.tcx())));
let (start_fn, args) = if use_start_lang_item {
let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) {
Ok(id) => id,
- Err(s) => { ccx.sess().fatal(&s[]); }
+ Err(s) => { ccx.sess().fatal(&s[..]); }
};
let start_fn = if start_def_id.krate == ast::LOCAL_CRATE {
get_item_val(ccx, start_def_id.node)
} else {
llvm::LLVMTypeOf(v)
};
- if contains_null(&sym[]) {
+ if contains_null(&sym[..]) {
ccx.sess().fatal(
&format!("Illegal null byte in export_name \
value: `{}`", sym)[]);
Some(compressed) => compressed,
None => cx.sess().fatal("failed to compress metadata"),
});
- let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[]);
+ let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]);
let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false);
let name = format!("rust_metadata_{}_{}",
cx.link_meta().crate_name,
} else {
let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
self.count_insn("gepi");
- self.inbounds_gep(base, &v[])
+ self.inbounds_gep(base, &v[..])
}
}
let s = format!("{} ({})",
text,
self.ccx.sess().codemap().span_to_string(sp));
- debug!("{}", &s[]);
- self.add_comment(&s[]);
+ debug!("{}", &s[..]);
+ self.add_comment(&s[..]);
}
}
}).collect::<Vec<_>>();
debug!("Asm Output Type: {}", self.ccx.tn().type_to_string(output));
- let fty = Type::func(&argtys[], &output);
+ let fty = Type::func(&argtys[..], &output);
unsafe {
let v = llvm::LLVMInlineAsm(
fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
let llfn =
decl_internal_rust_fn(ccx,
tuple_fn_ty,
- &function_name[]);
+ &function_name[..]);
//
let empty_substs = tcx.mk_substs(Substs::trans_empty());
DebugLoc::None,
bare_fn_ty,
|bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) },
- ArgVals(&llargs[]),
+ ArgVals(&llargs[..]),
dest).bcx;
finish_fn(&fcx, bcx, sig.output, DebugLoc::None);
// Invoke the actual rust fn and update bcx/llresult.
let (llret, b) = base::invoke(bcx,
llfn,
- &llargs[],
+ &llargs[..],
callee_ty,
debug_loc);
bcx = b;
callee_ty,
llfn,
opt_llretslot.unwrap(),
- &llargs[],
+ &llargs[..],
arg_tys,
debug_loc);
}
let name = scope.block_name("clean");
debug!("generating cleanups for {}", name);
let bcx_in = self.new_block(label.is_unwind(),
- &name[],
+ &name[..],
None);
let mut bcx_out = bcx_in;
for cleanup in scope.cleanups.iter().rev() {
Some(llbb) => { return llbb; }
None => {
let name = last_scope.block_name("unwind");
- pad_bcx = self.new_block(true, &name[], None);
+ pad_bcx = self.new_block(true, &name[..], None);
last_scope.cached_landing_pad = Some(pad_bcx.llbb);
}
}
mangle_internal_name_by_path_and_seq(path, "closure")
});
- let llfn = decl_internal_rust_fn(ccx, function_type, &symbol[]);
+ let llfn = decl_internal_rust_fn(ccx, function_type, &symbol[..]);
// set an inline hint for all closures
set_inline_hint(llfn);
&[],
sig.output,
function_type.abi,
- ClosureEnv::Closure(&freevars[]));
+ ClosureEnv::Closure(&freevars[..]));
// Don't hoist this to the top of the function. It's perfectly legitimate
// to have a zero-size closure (in which case dest will be `Ignore`) and
Err(s) => {
let msg = format!("{} {}", msg, s);
match span {
- Some(span) => bcx.tcx().sess.span_fatal(span, &msg[]),
- None => bcx.tcx().sess.fatal(&msg[]),
+ Some(span) => bcx.tcx().sess.span_fatal(span, &msg[..]),
+ None => bcx.tcx().sess.fatal(&msg[..]),
}
}
}
ast::LitBool(b) => C_bool(cx, b),
ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()),
ast::LitBinary(ref data) => {
- let g = addr_of(cx, C_bytes(cx, &data[]), "binary", e.id);
+ let g = addr_of(cx, C_bytes(cx, &data[..]), "binary", e.id);
let base = ptrcast(g, Type::i8p(cx));
let prev_const = cx.const_unsized().borrow_mut()
.insert(base, g);
}
ast::ExprTup(ref es) => {
let repr = adt::represent_type(cx, ety);
- let vals = map_list(&es[]);
- adt::trans_const(cx, &*repr, 0, &vals[])
+ let vals = map_list(&es[..]);
+ adt::trans_const(cx, &*repr, 0, &vals[..])
}
ast::ExprStruct(_, ref fs, ref base_opt) => {
let repr = adt::represent_type(cx, ety);
}
}).collect::<Vec<_>>();
if ty::type_is_simd(cx.tcx(), ety) {
- C_vector(&cs[])
+ C_vector(&cs[..])
} else {
- adt::trans_const(cx, &*repr, discr, &cs[])
+ adt::trans_const(cx, &*repr, discr, &cs[..])
}
})
}
.collect::<Vec<_>>();
// If the vector contains enums, an LLVM array won't work.
if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
- C_struct(cx, &vs[], false)
+ C_struct(cx, &vs[..], false)
} else {
- C_array(llunitty, &vs[])
+ C_array(llunitty, &vs[..])
}
}
ast::ExprRepeat(ref elem, ref count) => {
let unit_val = const_expr(cx, &**elem, param_substs).0;
let vs: Vec<_> = repeat(unit_val).take(n).collect();
if val_ty(unit_val) != llunitty {
- C_struct(cx, &vs[], false)
+ C_struct(cx, &vs[..], false)
} else {
- C_array(llunitty, &vs[])
+ C_array(llunitty, &vs[..])
}
}
ast::ExprPath(_) | ast::ExprQPath(_) => {
}
ast::ExprCall(ref callee, ref args) => {
let opt_def = cx.tcx().def_map.borrow().get(&callee.id).cloned();
- let arg_vals = map_list(&args[]);
+ let arg_vals = map_list(&args[..]);
match opt_def {
Some(def::DefStruct(_)) => {
if ty::type_is_simd(cx.tcx(), ety) {
- C_vector(&arg_vals[])
+ C_vector(&arg_vals[..])
} else {
let repr = adt::represent_type(cx, ety);
- adt::trans_const(cx, &*repr, 0, &arg_vals[])
+ adt::trans_const(cx, &*repr, 0, &arg_vals[..])
}
}
Some(def::DefVariant(enum_did, variant_did, _)) => {
adt::trans_const(cx,
&*repr,
vinfo.disr_val,
- &arg_vals[])
+ &arg_vals[..])
}
_ => cx.sess().span_bug(e.span, "expected a struct or variant def")
}
// such as a function name in the module.
// 1. http://llvm.org/bugs/show_bug.cgi?id=11479
let llmod_id = format!("{}.{}.rs", crate_name, i);
- let local_ccx = LocalCrateContext::new(&shared_ccx, &llmod_id[]);
+ let local_ccx = LocalCrateContext::new(&shared_ccx, &llmod_id[..]);
shared_ccx.local_ccxs.push(local_ccx);
}
}
let name = format!("then-block-{}-", thn.id);
- let then_bcx_in = bcx.fcx.new_id_block(&name[], thn.id);
+ let then_bcx_in = bcx.fcx.new_id_block(&name[..], thn.id);
let then_bcx_out = trans_block(then_bcx_in, &*thn, dest);
trans::debuginfo::clear_source_location(bcx.fcx);
let did = langcall(bcx, Some(call_info.span), "", PanicFnLangItem);
let bcx = callee::trans_lang_call(bcx,
did,
- &args[],
+ &args[..],
Some(expr::Ignore),
call_info.debug_loc()).bcx;
Unreachable(bcx);
let did = langcall(bcx, Some(call_info.span), "", PanicBoundsCheckFnLangItem);
let bcx = callee::trans_lang_call(bcx,
did,
- &args[],
+ &args[..],
Some(expr::Ignore),
call_info.debug_loc()).bcx;
Unreachable(bcx);
if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() {
let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id);
cx.sess().bug(&format!("Type metadata for unique id '{}' is already in the TypeMap!",
- &unique_type_id_str[])[]);
+ &unique_type_id_str[..])[]);
}
}
self.get_unique_type_id_of_type(cx, component_type);
let component_type_id =
self.get_unique_type_id_as_string(component_type_id);
- unique_type_id.push_str(&component_type_id[]);
+ unique_type_id.push_str(&component_type_id[..]);
}
},
ty::ty_uniq(inner_type) => {
unique_type_id.push('~');
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
- unique_type_id.push_str(&inner_type_id[]);
+ unique_type_id.push_str(&inner_type_id[..]);
},
ty::ty_ptr(ty::mt { ty: inner_type, mutbl } ) => {
unique_type_id.push('*');
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
- unique_type_id.push_str(&inner_type_id[]);
+ unique_type_id.push_str(&inner_type_id[..]);
},
ty::ty_rptr(_, ty::mt { ty: inner_type, mutbl }) => {
unique_type_id.push('&');
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
- unique_type_id.push_str(&inner_type_id[]);
+ unique_type_id.push_str(&inner_type_id[..]);
},
ty::ty_vec(inner_type, optional_length) => {
match optional_length {
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
- unique_type_id.push_str(&inner_type_id[]);
+ unique_type_id.push_str(&inner_type_id[..]);
},
ty::ty_trait(ref trait_data) => {
unique_type_id.push_str("trait ");
self.get_unique_type_id_of_type(cx, parameter_type);
let parameter_type_id =
self.get_unique_type_id_as_string(parameter_type_id);
- unique_type_id.push_str(&parameter_type_id[]);
+ unique_type_id.push_str(&parameter_type_id[..]);
unique_type_id.push(',');
}
ty::FnConverging(ret_ty) => {
let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty);
let return_type_id = self.get_unique_type_id_as_string(return_type_id);
- unique_type_id.push_str(&return_type_id[]);
+ unique_type_id.push_str(&return_type_id[..]);
}
ty::FnDiverging => {
unique_type_id.push_str("!");
type_map.get_unique_type_id_of_type(cx, type_parameter);
let param_type_id =
type_map.get_unique_type_id_as_string(param_type_id);
- output.push_str(&param_type_id[]);
+ output.push_str(&param_type_id[..]);
output.push(',');
}
self.get_unique_type_id_of_type(cx, parameter_type);
let parameter_type_id =
self.get_unique_type_id_as_string(parameter_type_id);
- unique_type_id.push_str(&parameter_type_id[]);
+ unique_type_id.push_str(&parameter_type_id[..]);
unique_type_id.push(',');
}
ty::FnConverging(ret_ty) => {
let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty);
let return_type_id = self.get_unique_type_id_as_string(return_type_id);
- unique_type_id.push_str(&return_type_id[]);
+ unique_type_id.push_str(&return_type_id[..]);
}
ty::FnDiverging => {
unique_type_id.push_str("!");
let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id));
let var_name = token::get_ident(ident).to_string();
let linkage_name =
- namespace_node.mangled_name_of_contained_item(&var_name[]);
+ namespace_node.mangled_name_of_contained_item(&var_name[..]);
let var_scope = namespace_node.scope;
let var_name = CString::from_slice(var_name.as_bytes());
match expr.node {
ast::ExprClosure(_, ref fn_decl, ref top_level_block) => {
let name = format!("fn{}", token::gensym("fn"));
- let name = token::str_to_ident(&name[]);
+ let name = token::str_to_ident(&name[..]);
(name, &**fn_decl,
// This is not quite right. It should actually inherit
// the generics of the enclosing function.
let (linkage_name, containing_scope) = if has_path {
let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id));
let linkage_name = namespace_node.mangled_name_of_contained_item(
- &function_name[]);
+ &function_name[..]);
let containing_scope = namespace_node.scope;
(linkage_name, containing_scope)
} else {
signature.push(type_metadata(cx, arg_type, codemap::DUMMY_SP));
}
- return create_DIArray(DIB(cx), &signature[]);
+ return create_DIArray(DIB(cx), &signature[..]);
}
fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
actual_self_type,
true);
- name_to_append_suffix_to.push_str(&actual_self_type_name[]);
+ name_to_append_suffix_to.push_str(&actual_self_type_name[..]);
if generics.is_type_parameterized() {
name_to_append_suffix_to.push_str(",");
let actual_type_name = compute_debuginfo_type_name(cx,
actual_type,
true);
- name_to_append_suffix_to.push_str(&actual_type_name[]);
+ name_to_append_suffix_to.push_str(&actual_type_name[..]);
if index != generics.ty_params.len() - 1 {
name_to_append_suffix_to.push_str(",");
name_to_append_suffix_to.push('>');
- return create_DIArray(DIB(cx), &template_params[]);
+ return create_DIArray(DIB(cx), &template_params[..]);
}
}
let cx: &CrateContext = bcx.ccx();
let filename = span_start(cx, span).file.name.clone();
- let file_metadata = file_metadata(cx, &filename[]);
+ let file_metadata = file_metadata(cx, &filename[..]);
let name = token::get_ident(variable_ident);
let loc = span_start(cx, span);
set_members_of_composite_type(cx,
metadata_stub,
llvm_type,
- &member_descriptions[]);
+ &member_descriptions[..]);
return MetadataCreationResult::new(metadata_stub, true);
}
}
let struct_metadata_stub = create_struct_stub(cx,
struct_llvm_type,
- &struct_name[],
+ &struct_name[..],
unique_type_id,
containing_scope);
unique_type_id,
create_struct_stub(cx,
tuple_llvm_type,
- &tuple_name[],
+ &tuple_name[..],
unique_type_id,
UNKNOWN_SCOPE_METADATA),
tuple_llvm_type,
set_members_of_composite_type(cx,
variant_type_metadata,
variant_llvm_type,
- &member_descriptions[]);
+ &member_descriptions[..]);
MemberDescription {
name: "".to_string(),
llvm_type: variant_llvm_type,
set_members_of_composite_type(cx,
variant_type_metadata,
variant_llvm_type,
- &member_descriptions[]);
+ &member_descriptions[..]);
vec![
MemberDescription {
name: "".to_string(),
set_members_of_composite_type(cx,
variant_type_metadata,
variant_llvm_type,
- &variant_member_descriptions[]);
+ &variant_member_descriptions[..]);
// Encode the information about the null variant in the union
// member's name.
.collect();
unsafe {
- let type_array = create_DIArray(DIB(cx), &member_metadata[]);
+ let type_array = create_DIArray(DIB(cx), &member_metadata[..]);
llvm::LLVMDICompositeTypeSetTypeArray(DIB(cx), composite_type_metadata, type_array);
}
}
let member_llvm_types = slice_llvm_type.field_types();
assert!(slice_layout_is_correct(cx,
- &member_llvm_types[],
+ &member_llvm_types[..],
element_type));
let member_descriptions = [
MemberDescription {
let metadata = composite_type_metadata(cx,
slice_llvm_type,
- &slice_type_name[],
+ &slice_type_name[..],
unique_type_id,
&member_descriptions,
UNKNOWN_SCOPE_METADATA,
llvm::LLVMDIBuilderCreateSubroutineType(
DIB(cx),
UNKNOWN_FILE_METADATA,
- create_DIArray(DIB(cx), &signature_metadata[]))
+ create_DIArray(DIB(cx), &signature_metadata[..]))
},
false);
}
let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type);
cx.sess().bug(&format!("debuginfo: Unexpected trait-object type in \
trait_pointer_metadata(): {}",
- &pp_type_name[])[]);
+ &pp_type_name[..])[]);
}
};
composite_type_metadata(cx,
trait_llvm_type,
- &trait_type_name[],
+ &trait_type_name[..],
unique_type_id,
&[],
containing_scope,
ty::ty_tup(ref elements) => {
prepare_tuple_metadata(cx,
t,
- &elements[],
+ &elements[..],
unique_type_id,
usage_site_span).finalize(cx)
}
type id '{}' to already be in \
the debuginfo::TypeMap but it \
was not. (Ty = {})",
- &unique_type_id_str[],
+ &unique_type_id_str[..],
ppaux::ty_to_string(cx.tcx(), t));
- cx.sess().span_bug(usage_site_span, &error_message[]);
+ cx.sess().span_bug(usage_site_span, &error_message[..]);
}
};
UniqueTypeId maps in \
debuginfo::TypeMap. \
UniqueTypeId={}, Ty={}",
- &unique_type_id_str[],
+ &unique_type_id_str[..],
ppaux::ty_to_string(cx.tcx(), t));
- cx.sess().span_bug(usage_site_span, &error_message[]);
+ cx.sess().span_bug(usage_site_span, &error_message[..]);
}
}
None => {
attributes.iter().any(|attr| {
let meta_item: &ast::MetaItem = &*attr.node.value;
match meta_item.node {
- ast::MetaWord(ref value) => &value[] == "no_debug",
+ ast::MetaWord(ref value) => &value[..] == "no_debug",
_ => false
}
})
controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest)
}
ast::ExprMatch(ref discr, ref arms, _) => {
- _match::trans_match(bcx, expr, &**discr, &arms[], dest)
+ _match::trans_match(bcx, expr, &**discr, &arms[..], dest)
}
ast::ExprBlock(ref blk) => {
controlflow::trans_block(bcx, &**blk, dest)
}
ast::ExprStruct(_, ref fields, ref base) => {
trans_struct(bcx,
- &fields[],
+ &fields[..],
base.as_ref().map(|e| &**e),
expr.span,
expr.id,
trans_adt(bcx,
expr_ty(bcx, expr),
0,
- &numbered_fields[],
+ &numbered_fields[..],
None,
dest,
expr.debug_loc())
trans_overloaded_call(bcx,
expr,
&**f,
- &args[],
+ &args[..],
Some(dest))
} else {
callee::trans_call(bcx,
expr,
&**f,
- callee::ArgExprs(&args[]),
+ callee::ArgExprs(&args[..]),
dest)
}
}
callee::trans_method_call(bcx,
expr,
&*args[0],
- callee::ArgExprs(&args[]),
+ callee::ArgExprs(&args[..]),
dest)
}
ast::ExprBinary(op, ref lhs, ref rhs) => {
ty::ty_struct(did, substs) => {
let fields = struct_fields(tcx, did, substs);
let fields = monomorphize::normalize_associated_type(tcx, &fields);
- op(0, &fields[])
+ op(0, &fields[..])
}
ty::ty_tup(ref v) => {
- op(0, &tup_fields(&v[])[])
+ op(0, &tup_fields(&v[..])[..])
}
ty::ty_enum(_, substs) => {
tcx, enum_id, variant_id);
let fields = struct_fields(tcx, variant_id, substs);
let fields = monomorphize::normalize_associated_type(tcx, &fields);
- op(variant_info.disr_val, &fields[])
+ op(variant_info.disr_val, &fields[..])
}
_ => {
tcx.sess.bug("resolve didn't map this expr to a \
_ => ccx.sess().bug("trans_native_call called on non-function type")
};
let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig);
- let llsig = foreign_signature(ccx, &fn_sig, &passed_arg_tys[]);
+ let llsig = foreign_signature(ccx, &fn_sig, &passed_arg_tys[..]);
let fn_type = cabi::compute_abi_info(ccx,
&llsig.llarg_tys[],
llsig.llret_ty,
let llforeign_retval = CallWithConv(bcx,
llfn,
- &llargs_foreign[],
+ &llargs_foreign[..],
cc,
Some(attrs),
call_debug_loc);
ccx.tcx().map.path_to_string(id),
id, t.repr(tcx));
- let llfn = base::decl_internal_rust_fn(ccx, t, &ps[]);
+ let llfn = base::decl_internal_rust_fn(ccx, t, &ps[..]);
base::set_llvm_fn_attrs(ccx, attrs, llfn);
base::trans_fn(ccx, decl, body, llfn, param_substs, id, &[]);
llfn
if tys.fn_sig.variadic {
Type::variadic_func(&llargument_tys, &llreturn_ty)
} else {
- Type::func(&llargument_tys[], &llreturn_ty)
+ Type::func(&llargument_tys[..], &llreturn_ty)
}
}
let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) {
Some(old_sym) => {
- let glue = decl_cdecl_fn(ccx, &old_sym[], llfnty, ty::mk_nil(ccx.tcx()));
+ let glue = decl_cdecl_fn(ccx, &old_sym[..], llfnty, ty::mk_nil(ccx.tcx()));
(glue, None)
},
None => {
class_did,
&[get_drop_glue_type(bcx.ccx(), t)],
ty::mk_nil(bcx.tcx()));
- let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[], dtor_ty, DebugLoc::None);
+ let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[..], dtor_ty, DebugLoc::None);
variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope);
variant_cx
ccx,
t,
&format!("glue_{}", name)[]);
- let llfn = decl_cdecl_fn(ccx, &fn_nm[], llfnty, ty::mk_nil(ccx.tcx()));
+ let llfn = decl_cdecl_fn(ccx, &fn_nm[..], llfnty, ty::mk_nil(ccx.tcx()));
note_unique_llvm_symbol(ccx, fn_nm.clone());
return (fn_nm, llfn);
}
let name = token::get_ident(foreign_item.ident);
// For `transmute` we can just trans the input expr directly into dest
- if &name[] == "transmute" {
+ if &name[..] == "transmute" {
let llret_ty = type_of::type_of(ccx, ret_ty.unwrap());
match args {
callee::ArgExprs(arg_exprs) => {
let call_debug_location = DebugLoc::At(call_info.id, call_info.span);
// These are the only intrinsic functions that diverge.
- if &name[] == "abort" {
+ if &name[..] == "abort" {
let llfn = ccx.get_intrinsic(&("llvm.trap"));
Call(bcx, llfn, &[], None, call_debug_location);
fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope);
Unreachable(bcx);
return Result::new(bcx, C_undef(Type::nil(ccx).ptr_to()));
- } else if &name[] == "unreachable" {
+ } else if &name[..] == "unreachable" {
fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope);
Unreachable(bcx);
return Result::new(bcx, C_nil(ccx));
};
let simple = get_simple_intrinsic(ccx, &*foreign_item);
- let llval = match (simple, &name[]) {
+ let llval = match (simple, &name[..]) {
(Some(llfn), _) => {
Call(bcx, llfn, &llargs, None, call_debug_location)
}
hash = format!("h{}", state.finish());
ccx.tcx().map.with_path(fn_id.node, |path| {
- exported_name(path, &hash[])
+ exported_name(path, &hash[..])
})
};
let mut hash_id = Some(hash_id);
let mut mk_lldecl = |abi: abi::Abi| {
let lldecl = if abi != abi::Rust {
- foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, &s[])
+ foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, &s[..])
} else {
- decl_internal_rust_fn(ccx, mono_ty, &s[])
+ decl_internal_rust_fn(ccx, mono_ty, &s[..])
};
ccx.monomorphized().borrow_mut().insert(hash_id.take().unwrap(), lldecl);
if abi != abi::Rust {
foreign::trans_rust_fn_with_foreign_abi(
ccx, &**decl, &**body, &[], d, psubsts, fn_id.node,
- Some(&hash[]));
+ Some(&hash[..]));
} else {
trans_fn(ccx, &**decl, &**body, d, psubsts, fn_id.node, &[]);
}
trans_enum_variant(ccx,
parent,
&*v,
- &args[],
+ &args[..],
this_tv.disr_val,
psubsts,
d);
let input_tys = inputs.iter().map(|&arg_ty| type_of_explicit_arg(cx, arg_ty));
atys.extend(input_tys);
- Type::func(&atys[], &lloutputtype)
+ Type::func(&atys[..], &lloutputtype)
}
// Given a function type and a count of ty params, construct an llvm type
let repr = adt::represent_type(cx, t);
let tps = substs.types.get_slice(subst::TypeSpace);
let name = llvm_type_name(cx, an_enum, did, tps);
- adt::incomplete_type_of(cx, &*repr, &name[])
+ adt::incomplete_type_of(cx, &*repr, &name[..])
}
ty::ty_closure(did, _, ref substs) => {
// Only create the named struct, but don't fill it in. We
// contents of the VecPerParamSpace to to construct the llvm
// name
let name = llvm_type_name(cx, a_closure, did, substs.types.as_slice());
- adt::incomplete_type_of(cx, &*repr, &name[])
+ adt::incomplete_type_of(cx, &*repr, &name[..])
}
ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) | ty::ty_ptr(ty::mt{ty, ..}) => {
let repr = adt::represent_type(cx, t);
let tps = substs.types.get_slice(subst::TypeSpace);
let name = llvm_type_name(cx, a_struct, did, tps);
- adt::incomplete_type_of(cx, &*repr, &name[])
+ adt::incomplete_type_of(cx, &*repr, &name[..])
}
}
ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None)
}
ast::TyObjectSum(ref ty, ref bounds) => {
- match ast_ty_to_trait_ref(this, rscope, &**ty, &bounds[]) {
+ match ast_ty_to_trait_ref(this, rscope, &**ty, &bounds[..]) {
Ok((trait_ref, projection_bounds)) => {
trait_ref_to_object_type(this,
rscope,
ast_ty.span,
trait_ref,
projection_bounds,
- &bounds[])
+ &bounds[..])
}
Err(ErrorReported) => {
this.tcx().types.err
ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(bare_fn))
}
ast::TyPolyTraitRef(ref bounds) => {
- conv_ty_poly_trait_ref(this, rscope, ast_ty.span, &bounds[])
+ conv_ty_poly_trait_ref(this, rscope, ast_ty.span, &bounds[..])
}
ast::TyPath(ref path, id) => {
let a_def = match tcx.def_map.borrow().get(&id) {
// Skip the first argument if `self` is present.
&self_and_input_tys[1..]
} else {
- &self_and_input_tys[]
+ &self_and_input_tys[..]
};
let (ior, lfp) = find_implied_output_region(input_tys, input_pats);
ast_bounds: &[ast::TyParamBound])
-> Ty<'tcx>
{
- let mut partitioned_bounds = partition_bounds(this.tcx(), span, &ast_bounds[]);
+ let mut partitioned_bounds = partition_bounds(this.tcx(), span, &ast_bounds[..]);
let mut projection_bounds = Vec::new();
let main_trait_bound = if !partitioned_bounds.trait_bounds.is_empty() {
check_pat_enum(pcx, pat, &path, Some(&[]), expected);
}
ast::PatEnum(ref path, ref subpats) => {
- let subpats = subpats.as_ref().map(|v| &v[]);
+ let subpats = subpats.as_ref().map(|v| &v[..]);
check_pat_enum(pcx, pat, path, subpats, expected);
}
ast::PatStruct(ref path, ref fields, etc) => {
check_argument_types(fcx,
call_expr.span,
&fn_sig.inputs,
- &expected_arg_tys[],
+ &expected_arg_tys[..],
arg_exprs,
AutorefArgs::No,
fn_sig.variadic,
debug!("applicable_candidates: {}", applicable_candidates.repr(self.tcx()));
if applicable_candidates.len() > 1 {
- match self.collapse_candidates_to_trait_pick(&applicable_candidates[]) {
+ match self.collapse_candidates_to_trait_pick(&applicable_candidates[..]) {
Some(pick) => { return Some(Ok(pick)); }
None => { }
}
traits_are = if candidates.len() == 1 {"trait is"} else {"traits are"},
one_of_them = if candidates.len() == 1 {"it"} else {"one of them"});
- fcx.sess().fileline_help(span, &msg[]);
+ fcx.sess().fileline_help(span, &msg[..]);
for (i, trait_did) in candidates.iter().enumerate() {
fcx.sess().fileline_help(span,
one_of_them = if candidates.len() == 1 {"it"} else {"one of them"},
name = method_ustring);
- fcx.sess().fileline_help(span, &msg[]);
+ fcx.sess().fileline_help(span, &msg[..]);
for (i, trait_info) in candidates.iter().enumerate() {
fcx.sess().fileline_help(span,
check_argument_types(fcx,
sp,
- &err_inputs[],
+ &err_inputs[..],
&[],
args_no_rcvr,
autoref_args,
check_argument_types(fcx,
sp,
&fty.sig.0.inputs[1..],
- &expected_arg_tys[],
+ &expected_arg_tys[..],
args_no_rcvr,
autoref_args,
fty.sig.0.variadic,
ty::ty_struct(base_id, substs) => {
debug!("struct named {}", ppaux::ty_to_string(tcx, base_t));
let fields = ty::lookup_struct_fields(tcx, base_id);
- fcx.lookup_field_ty(expr.span, base_id, &fields[],
+ fcx.lookup_field_ty(expr.span, base_id, &fields[..],
field.node.name, &(*substs))
}
_ => None
if tuple_like {
debug!("tuple struct named {}", ppaux::ty_to_string(tcx, base_t));
let fields = ty::lookup_struct_fields(tcx, base_id);
- fcx.lookup_tup_field_ty(expr.span, base_id, &fields[],
+ fcx.lookup_tup_field_ty(expr.span, base_id, &fields[..],
idx.node, &(*substs))
} else {
None
class_id,
id,
fcx.ccx.tcx.mk_substs(struct_substs),
- &class_fields[],
+ &class_fields[..],
fields,
base_expr.is_none(),
None);
variant_id,
id,
fcx.ccx.tcx.mk_substs(substitutions),
- &variant_fields[],
+ &variant_fields[..],
fields,
true,
Some(enum_id));
fcx.write_ty(id, fcx.node_ty(b.id));
}
ast::ExprCall(ref callee, ref args) => {
- callee::check_call(fcx, expr, &**callee, &args[], expected);
+ callee::check_call(fcx, expr, &**callee, &args[..], expected);
}
ast::ExprMethodCall(ident, ref tps, ref args) => {
- check_method_call(fcx, expr, ident, &args[], &tps[], expected, lvalue_pref);
+ check_method_call(fcx, expr, ident, &args[..], &tps[..], expected, lvalue_pref);
let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a));
let args_err = arg_tys.fold(false,
|rest_err, a| {
ast::ExprTup(ref elts) => {
let flds = expected.only_has_type(fcx).and_then(|ty| {
match ty.sty {
- ty::ty_tup(ref flds) => Some(&flds[]),
+ ty::ty_tup(ref flds) => Some(&flds[..]),
_ => None
}
});
let struct_id = match def {
Some(def::DefVariant(enum_id, variant_id, true)) => {
check_struct_enum_variant(fcx, id, expr.span, enum_id,
- variant_id, &fields[]);
+ variant_id, &fields[..]);
enum_id
}
Some(def::DefTrait(def_id)) => {
pprust::path_to_string(path));
check_struct_fields_on_error(fcx,
id,
- &fields[],
+ &fields[..],
base_expr);
def_id
},
id,
expr.span,
struct_did,
- &fields[],
+ &fields[..],
base_expr.as_ref().map(|e| &**e));
}
_ => {
pprust::path_to_string(path));
check_struct_fields_on_error(fcx,
id,
- &fields[],
+ &fields[..],
base_expr);
}
}
}
};
(n_tps, inputs, ty::FnConverging(output))
- } else if &name[] == "abort" || &name[] == "unreachable" {
+ } else if &name[..] == "abort" || &name[..] == "unreachable" {
(0, Vec::new(), ty::FnDiverging)
} else {
- let (n_tps, inputs, output) = match &name[] {
+ let (n_tps, inputs, output) = match &name[..] {
"breakpoint" => (0, Vec::new(), ty::mk_nil(tcx)),
"size_of" |
"pref_align_of" | "min_align_of" => (1, Vec::new(), ccx.tcx.types.uint),
"get_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
Ok(t) => t,
- Err(s) => { span_fatal!(tcx.sess, it.span, E0240, "{}", &s[]); }
+ Err(s) => { span_fatal!(tcx.sess, it.span, E0240, "{}", &s[..]); }
};
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
ty: tydesc_ty,
};
let len = self.region_bound_pairs.len();
- self.relate_free_regions(&fn_sig[], body.id);
+ self.relate_free_regions(&fn_sig[..], body.id);
link_fn_args(self, CodeExtent::from_node_id(body.id), &fn_decl.inputs[]);
self.visit_block(body);
self.visit_region_obligations(body.id);
}
ast::ExprMatch(ref discr, ref arms, _) => {
- link_match(rcx, &**discr, &arms[]);
+ link_match(rcx, &**discr, &arms[..]);
visit::walk_expr(rcx, expr);
}
ast::TupleVariantKind(ref args) if args.len() > 0 => {
let rs = ExplicitRscope;
let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, &*va.ty)).collect();
- ty::mk_ctor_fn(tcx, variant_def_id, &input_tys[], enum_scheme.ty)
+ ty::mk_ctor_fn(tcx, variant_def_id, &input_tys[..], enum_scheme.ty)
}
ast::TupleVariantKind(_) => {
trait_id,
&trait_def.generics,
&trait_predicates,
- &trait_items[],
+ &trait_items[..],
&m.id,
&m.ident.name,
&m.explicit_self,
trait_id,
&trait_def.generics,
&trait_predicates,
- &trait_items[],
+ &trait_items[..],
&m.id,
&m.pe_ident().name,
m.pe_explicit_self(),
local_def(field.node.id)].ty).collect();
let ctor_fn_ty = ty::mk_ctor_fn(tcx,
local_def(ctor_id),
- &inputs[],
+ &inputs[..],
selfty);
write_ty_to_tcx(tcx, ctor_id, ctor_fn_ty);
tcx.tcache.borrow_mut().insert(local_def(ctor_id),
let early_lifetimes = resolve_lifetime::early_bound_lifetimes(generics);
ty_generics(ccx,
subst::FnSpace,
- &early_lifetimes[],
+ &early_lifetimes[..],
&generics.ty_params[],
&generics.where_clause,
base_generics)
// attribute and report an error with various results if found.
if ty::has_attr(tcx, item_def_id, "rustc_variance") {
let found = item_variances.repr(tcx);
- span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{}", &found[]);
+ span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{}", &found[..]);
}
let newly_added = tcx.item_variance_map.borrow_mut()
class,
id,
&mut out).unwrap();
- String::from_utf8_lossy(&out[]).into_owned()
+ String::from_utf8_lossy(&out[..]).into_owned()
}
/// Exhausts the `lexer` writing the output into `out`.
/// Returns None otherwise.
pub fn as_string<'a>(&'a self) -> Option<&'a str> {
match *self {
- Json::String(ref s) => Some(&s[]),
+ Json::String(ref s) => Some(&s[..]),
_ => None
}
}
return Err(ExpectedError("String or Object".to_string(), format!("{}", json)))
}
};
- let idx = match names.iter().position(|n| *n == &name[]) {
+ let idx = match names.iter().position(|n| *n == &name[..]) {
Some(idx) => idx,
None => return Err(UnknownVariantError(name))
};
hm.insert(1, true);
let mut mem_buf = Vec::new();
write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
- let json_str = from_utf8(&mem_buf[]).unwrap();
+ let json_str = from_utf8(&mem_buf[..]).unwrap();
match from_str(json_str) {
Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
_ => {} // it parsed and we are good to go
hm.insert(1, true);
let mut mem_buf = Vec::new();
write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
- let json_str = from_utf8(&mem_buf[]).unwrap();
+ let json_str = from_utf8(&mem_buf[..]).unwrap();
match from_str(json_str) {
Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
_ => {} // it parsed and we are good to go
write!(&mut writer, "{}",
super::as_pretty_json(&json).indent(i)).unwrap();
- let printed = from_utf8(&writer[]).unwrap();
+ let printed = from_utf8(&writer[..]).unwrap();
// Check for indents at each line
let lines: Vec<&str> = printed.lines().collect();
let mut map = HashMap::new();
map.insert(Enum::Foo, 0);
let result = json::encode(&map).unwrap();
- assert_eq!(&result[], r#"{"Foo":0}"#);
+ assert_eq!(&result[..], r#"{"Foo":0}"#);
let decoded: HashMap<Enum, _> = json::decode(&result).unwrap();
assert_eq!(map, decoded);
}
impl Encodable for String {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- s.emit_str(&self[])
+ s.emit_str(&self[..])
}
}
#[inline]
fn deref(&self) -> &OsStr {
- &self[]
+ &self[..]
}
}
}
impl BorrowFrom<OsString> for OsStr {
- fn borrow_from(owned: &OsString) -> &OsStr { &owned[] }
+ fn borrow_from(owned: &OsString) -> &OsStr { &owned[..] }
}
impl ToOwned<OsString> for OsStr {
impl AsOsStr for OsString {
fn as_os_str(&self) -> &OsStr {
- &self[]
+ &self[..]
}
}
impl AsOsStr for String {
fn as_os_str(&self) -> &OsStr {
- OsStr::from_str(&self[])
+ OsStr::from_str(&self[..])
}
}
#[test]
fn read_char_buffered() {
let buf = [195u8, 159u8];
- let mut reader = BufReader::with_capacity(1, &buf[]);
+ let mut reader = BufReader::with_capacity(1, &buf[..]);
assert_eq!(reader.chars().next(), Some(Ok('ß')));
}
#[test]
fn test_chars() {
let buf = [195u8, 159u8, b'a'];
- let mut reader = BufReader::with_capacity(1, &buf[]);
+ let mut reader = BufReader::with_capacity(1, &buf[..]);
let mut it = reader.chars();
assert_eq!(it.next(), Some(Ok('ß')));
assert_eq!(it.next(), Some(Ok('a')));
fn test_buf_writer() {
let mut buf = [0 as u8; 9];
{
- let mut writer = Cursor::new(&mut buf[]);
+ let mut writer = Cursor::new(&mut buf[..]);
assert_eq!(writer.position(), 0);
assert_eq!(writer.write(&[0]), Ok(1));
assert_eq!(writer.position(), 1);
fn test_buf_writer_seek() {
let mut buf = [0 as u8; 8];
{
- let mut writer = Cursor::new(&mut buf[]);
+ let mut writer = Cursor::new(&mut buf[..]);
assert_eq!(writer.position(), 0);
assert_eq!(writer.write(&[1]), Ok(1));
assert_eq!(writer.position(), 1);
#[test]
fn test_buf_writer_error() {
let mut buf = [0 as u8; 2];
- let mut writer = Cursor::new(&mut buf[]);
+ let mut writer = Cursor::new(&mut buf[..]);
assert_eq!(writer.write(&[0]), Ok(1));
assert_eq!(writer.write(&[0, 0]), Ok(1));
assert_eq!(writer.write(&[0, 0]), Ok(0));
#[test]
fn seek_past_end() {
let buf = [0xff];
- let mut r = Cursor::new(&buf[]);
+ let mut r = Cursor::new(&buf[..]);
assert_eq!(r.seek(SeekFrom::Start(10)), Ok(10));
assert_eq!(r.read(&mut [0]), Ok(0));
assert_eq!(r.read(&mut [0]), Ok(0));
let mut buf = [0];
- let mut r = Cursor::new(&mut buf[]);
+ let mut r = Cursor::new(&mut buf[..]);
assert_eq!(r.seek(SeekFrom::Start(10)), Ok(10));
assert_eq!(r.write(&[3]), Ok(0));
}
#[test]
fn seek_before_0() {
let buf = [0xff_u8];
- let mut r = Cursor::new(&buf[]);
+ let mut r = Cursor::new(&buf[..]);
assert!(r.seek(SeekFrom::End(-2)).is_err());
let mut r = Cursor::new(vec!(10u8));
assert!(r.seek(SeekFrom::End(-2)).is_err());
let mut buf = [0];
- let mut r = Cursor::new(&mut buf[]);
+ let mut r = Cursor::new(&mut buf[..]);
assert!(r.seek(SeekFrom::End(-2)).is_err());
}
assert_eq!(a, &w.get_ref()[]);
let w = w.into_inner();
let a: &[_] = &[0, 1];
- assert_eq!(a, &w[]);
+ assert_eq!(a, &w[..]);
}
// This is just here to make sure that we don't infinite loop in the
#[test]
fn read_char_buffered() {
let buf = [195u8, 159u8];
- let mut reader = BufferedReader::with_capacity(1, &buf[]);
+ let mut reader = BufferedReader::with_capacity(1, &buf[..]);
assert_eq!(reader.read_char(), Ok('ß'));
}
#[test]
fn test_chars() {
let buf = [195u8, 159u8, b'a'];
- let mut reader = BufferedReader::with_capacity(1, &buf[]);
+ let mut reader = BufferedReader::with_capacity(1, &buf[..]);
let mut it = reader.chars();
assert_eq!(it.next(), Some(Ok('ß')));
assert_eq!(it.next(), Some(Ok('a')));
}
#[inline]
fn container_as_str(&self) -> Option<&str> {
- Some(&self[])
+ Some(&self[..])
}
#[inline]
fn is_str(_: Option<&String>) -> bool { true }
impl BytesContainer for Vec<u8> {
#[inline]
fn container_as_bytes(&self) -> &[u8] {
- &self[]
+ &self[..]
}
}
s.push_str("..");
s.push(SEP);
s.push_str(filename);
- self.update_normalized(&s[]);
+ self.update_normalized(&s[..]);
}
None => {
self.update_normalized(filename);
s.push_str(&self.repr[..end]);
s.push(SEP);
s.push_str(filename);
- self.update_normalized(&s[]);
+ self.update_normalized(&s[..]);
}
Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => {
let mut s = String::with_capacity(idxb + filename.len());
s.push_str(&self.repr[..idxb]);
s.push_str(filename);
- self.update_normalized(&s[]);
+ self.update_normalized(&s[..]);
}
Some((idxb,_,_)) => {
let mut s = String::with_capacity(idxb + 1 + filename.len());
s.push_str(&self.repr[..idxb]);
s.push(SEP);
s.push_str(filename);
- self.update_normalized(&s[]);
+ self.update_normalized(&s[..]);
}
}
}
}
fn shares_volume(me: &Path, path: &str) -> bool {
// path is assumed to have a prefix of Some(DiskPrefix)
- let repr = &me.repr[];
+ let repr = &me.repr[..];
match me.prefix {
Some(DiskPrefix) => {
repr.as_bytes()[0] == path.as_bytes()[0].to_ascii_uppercase()
else { None };
let pathlen = path_.as_ref().map_or(path.len(), |p| p.len());
let mut s = String::with_capacity(me.repr.len() + 1 + pathlen);
- s.push_str(&me.repr[]);
+ s.push_str(&me.repr[..]);
let plen = me.prefix_len();
// if me is "C:" we don't want to add a path separator
match me.prefix {
}
match path_ {
None => s.push_str(path),
- Some(p) => s.push_str(&p[]),
+ Some(p) => s.push_str(&p[..]),
};
- me.update_normalized(&s[])
+ me.update_normalized(&s[..])
}
if !path.is_empty() {
/// Always returns a `Some` value.
#[inline]
fn as_str<'a>(&'a self) -> Option<&'a str> {
- Some(&self.repr[])
+ Some(&self.repr[..])
}
#[inline]
/// Always returns a `Some` value.
fn dirname_str<'a>(&'a self) -> Option<&'a str> {
Some(match self.sepidx_or_prefix_len() {
- None if ".." == self.repr => &self.repr[],
+ None if ".." == self.repr => &self.repr[..],
None => ".",
Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => {
- &self.repr[]
+ &self.repr[..]
}
Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => {
- &self.repr[]
+ &self.repr[..]
}
Some((0,idxa,_)) => &self.repr[..idxa],
Some((idxb,idxa,_)) => {
/// See `GenericPath::filename_str` for info.
/// Always returns a `Some` value if `filename` returns a `Some` value.
fn filename_str<'a>(&'a self) -> Option<&'a str> {
- let repr = &self.repr[];
+ let repr = &self.repr[..];
match self.sepidx_or_prefix_len() {
None if "." == repr || ".." == repr => None,
None => Some(repr),
/// Does not distinguish between absolute and cwd-relative paths, e.g.
/// C:\foo and C:foo.
pub fn str_components<'a>(&'a self) -> StrComponents<'a> {
- let repr = &self.repr[];
+ let repr = &self.repr[..];
let s = match self.prefix {
Some(_) => {
let plen = self.prefix_len();
}
fn equiv_prefix(&self, other: &Path) -> bool {
- let s_repr = &self.repr[];
- let o_repr = &other.repr[];
+ let s_repr = &self.repr[..];
+ let o_repr = &other.repr[..];
match (self.prefix, other.prefix) {
(Some(DiskPrefix), Some(VerbatimDiskPrefix)) => {
self.is_absolute() &&
fn update_sepidx(&mut self) {
let s = if self.has_nonsemantic_trailing_slash() {
&self.repr[..self.repr.len()-1]
- } else { &self.repr[] };
+ } else { &self.repr[..] };
let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) {
is_sep
} else {
/// non-verbatim, the non-verbatim version is returned.
/// Otherwise, None is returned.
pub fn make_non_verbatim(path: &Path) -> Option<Path> {
- let repr = &path.repr[];
+ let repr = &path.repr[..];
let new_path = match path.prefix {
Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None,
Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()),
let msg = match obj.downcast_ref::<&'static str>() {
Some(s) => *s,
None => match obj.downcast_ref::<String>() {
- Some(s) => &s[],
+ Some(s) => &s[..],
None => "Box<Any>",
}
};
type Target = Path;
fn deref(&self) -> &Path {
- unsafe { mem::transmute(&self.inner[]) }
+ unsafe { mem::transmute(&self.inner[..]) }
}
}
impl AsOsStr for PathBuf {
fn as_os_str(&self) -> &OsStr {
- &self.inner[]
+ &self.inner[..]
}
}
let mut n = stack_buf.len();
loop {
let buf = if n <= stack_buf.len() {
- &mut stack_buf[]
+ &mut stack_buf[..]
} else {
let extra = n - heap_buf.len();
heap_buf.reserve(extra);
heap_buf.set_len(n);
- &mut heap_buf[]
+ &mut heap_buf[..]
};
// This function is typically called on windows API functions which
let (k, v) = match s.iter().position(|&b| b == '=' as u16) {
Some(n) => (&s[..n], &s[n+1..]),
- None => (s, &[][]),
+ None => (s, &[][..]),
};
Some((OsStringExt::from_wide(k), OsStringExt::from_wide(v)))
}
if !must_yield && in_progress.is_empty() {
None
} else {
- Some(super::os2path(&in_progress[]))
+ Some(super::os2path(&in_progress[..]))
}
}
}
return Err(JoinPathsError)
} else if v.contains(&sep) {
joined.push(b'"' as u16);
- joined.push_all(&v[]);
+ joined.push_all(&v[..]);
joined.push(b'"' as u16);
} else {
- joined.push_all(&v[]);
+ joined.push_all(&v[..]);
}
}
- Ok(OsStringExt::from_wide(&joined[]))
+ Ok(OsStringExt::from_wide(&joined[..]))
}
impl fmt::Display for JoinPathsError {
"echo \"a b c\""
);
assert_eq!(
test_wrapper("\u{03c0}\u{042f}\u{97f3}\u{00e6}\u{221e}", &[]),
"\u{03c0}\u{042f}\u{97f3}\u{00e6}\u{221e}"
);
}
impl Decodable for Ident {
fn decode<D: Decoder>(d: &mut D) -> Result<Ident, D::Error> {
- Ok(str_to_ident(&try!(d.read_str())[]))
+ Ok(str_to_ident(&try!(d.read_str())[..]))
}
}
if !s.is_empty() {
s.push_str("::");
}
- s.push_str(&e[]);
+ s.push_str(&e[..]);
s
})
}
F: FnOnce(Option<&[Attribute]>) -> T,
{
let attrs = match self.get(id) {
- NodeItem(i) => Some(&i.attrs[]),
- NodeForeignItem(fi) => Some(&fi.attrs[]),
+ NodeItem(i) => Some(&i.attrs[..]),
+ NodeForeignItem(fi) => Some(&fi.attrs[..]),
NodeTraitItem(ref tm) => match **tm {
- RequiredMethod(ref type_m) => Some(&type_m.attrs[]),
- ProvidedMethod(ref m) => Some(&m.attrs[]),
- TypeTraitItem(ref typ) => Some(&typ.attrs[]),
+ RequiredMethod(ref type_m) => Some(&type_m.attrs[..]),
+ ProvidedMethod(ref m) => Some(&m.attrs[..]),
+ TypeTraitItem(ref typ) => Some(&typ.attrs[..]),
},
NodeImplItem(ref ii) => {
match **ii {
- MethodImplItem(ref m) => Some(&m.attrs[]),
- TypeImplItem(ref t) => Some(&t.attrs[]),
+ MethodImplItem(ref m) => Some(&m.attrs[..]),
+ TypeImplItem(ref t) => Some(&t.attrs[..]),
}
}
- NodeVariant(ref v) => Some(&v.node.attrs[]),
+ NodeVariant(ref v) => Some(&v.node.attrs[..]),
// unit/tuple structs take the attributes straight from
// the struct definition.
// FIXME(eddyb) make this work again (requires access to the map).
None => return false,
Some((node_id, name)) => (node_id, name),
};
- if &part[] != mod_name.as_str() {
+ if &part[..] != mod_name.as_str() {
return false;
}
cursor = self.map.get_parent(mod_id);
// We are looking at some node `n` with a given name and parent
// id; do their names match what I am seeking?
fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool {
- name.as_str() == &self.item_name[] &&
+ name.as_str() == &self.item_name[..] &&
self.suffix_matches(parent_of_n)
}
}
fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
let id_str = format!(" (id={})", id);
- let id_str = if include_id { &id_str[] } else { "" };
+ let id_str = if include_id { &id_str[..] } else { "" };
match map.find(id) {
Some(NodeItem(item)) => {
match *trait_ref {
Some(ref trait_ref) => {
pretty.push('.');
- pretty.push_str(&pprust::path_to_string(&trait_ref.path)[]);
+ pretty.push_str(&pprust::path_to_string(&trait_ref.path));
}
None => {}
}
- token::gensym_ident(&pretty[])
+ token::gensym_ident(&pretty[..])
}
pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod {
pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool {
(a.span == b.span)
&& (a.global == b.global)
- && (segments_name_eq(&a.segments[], &b.segments[]))
+ && (segments_name_eq(&a.segments[..], &b.segments[..]))
}
// are two arrays of segments equal when compared unhygienically?
pub trait AttrMetaMethods {
fn check_name(&self, name: &str) -> bool {
- name == &self.name()[]
+ name == &self.name()[..]
}
/// Retrieve the name of the meta item, e.g. `foo` in `#[foo]`,
impl AttrMetaMethods for Attribute {
fn check_name(&self, name: &str) -> bool {
- let matches = name == &self.name()[];
+ let matches = name == &self.name()[..];
if matches {
mark_used(self);
}
fn meta_item_list<'a>(&'a self) -> Option<&'a [P<MetaItem>]> {
match self.node {
- MetaList(_, ref l) => Some(&l[]),
+ MetaList(_, ref l) => Some(&l[..]),
_ => None
}
}
let meta = mk_name_value_item_str(
InternedString::new("doc"),
token::intern_and_get_ident(&strip_doc_comment_decoration(
- &comment)[]));
+ &comment)));
if self.node.style == ast::AttrOuter {
f(&mk_attr_outer(self.node.id, meta))
} else {
}
MetaList(ref n, ref items) if *n == "inline" => {
mark_used(attr);
- if contains_name(&items[], "always") {
+ if contains_name(&items[..], "always") {
InlineAlways
- } else if contains_name(&items[], "never") {
+ } else if contains_name(&items[..], "never") {
InlineNever
} else {
InlineHint
/// Tests if a cfg-pattern matches the cfg set
pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P<MetaItem>], cfg: &ast::MetaItem) -> bool {
match cfg.node {
- ast::MetaList(ref pred, ref mis) if &pred[] == "any" =>
+ ast::MetaList(ref pred, ref mis) if &pred[..] == "any" =>
mis.iter().any(|mi| cfg_matches(diagnostic, cfgs, &**mi)),
- ast::MetaList(ref pred, ref mis) if &pred[] == "all" =>
+ ast::MetaList(ref pred, ref mis) if &pred[..] == "all" =>
mis.iter().all(|mi| cfg_matches(diagnostic, cfgs, &**mi)),
- ast::MetaList(ref pred, ref mis) if &pred[] == "not" => {
+ ast::MetaList(ref pred, ref mis) if &pred[..] == "not" => {
if mis.len() != 1 {
diagnostic.span_err(cfg.span, "expected 1 cfg-pattern");
return false;
'outer: for attr in attrs {
let tag = attr.name();
- let tag = &tag[];
+ let tag = &tag[..];
if tag != "deprecated" && tag != "unstable" && tag != "stable" {
continue // not a stability level
}
}
}
}
- if &meta.name()[] == "since" {
+ if &meta.name()[..] == "since" {
match meta.value_str() {
Some(v) => since = Some(v),
None => {
}
}
}
- if &meta.name()[] == "reason" {
+ if &meta.name()[..] == "reason" {
match meta.value_str() {
Some(v) => reason = Some(v),
None => {
if !set.insert(name.clone()) {
diagnostic.span_fatal(meta.span,
- &format!("duplicate meta item `{}`", name)[]);
+ &format!("duplicate meta item `{}`", name));
}
}
}
for item in items {
match item.node {
ast::MetaWord(ref word) => {
- let hint = match &word[] {
+ let hint = match &word[..] {
// Can't use "extern" because it's not a lexical identifier.
"C" => Some(ReprExtern),
"packed" => Some(ReprPacked),
let mut src = if src.starts_with("\u{feff}") {
String::from_str(&src[3..])
} else {
- String::from_str(&src[])
+ String::from_str(&src[..])
};
// Append '\n' in case it's not already there.
}
};
- if attr::cfg_matches(self.diag, &self.config[], &cfg) {
+ if attr::cfg_matches(self.diag, &self.config[..], &cfg) {
Some(respan(mi.span, ast::Attribute_ {
id: attr::mk_attr_id(),
style: attr.node.style,
panic!(ExplicitBug);
}
pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
- self.span_bug(sp, &format!("unimplemented {}", msg)[]);
+ self.span_bug(sp, &format!("unimplemented {}", msg));
}
pub fn handler<'a>(&'a self) -> &'a Handler {
&self.handler
self.err_count.get());
}
}
- self.fatal(&s[]);
+ self.fatal(&s[..]);
}
pub fn warn(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Warning);
panic!(ExplicitBug);
}
pub fn unimpl(&self, msg: &str) -> ! {
- self.bug(&format!("unimplemented {}", msg)[]);
+ self.bug(&format!("unimplemented {}", msg));
}
pub fn emit(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
// the span)
let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
let ses = cm.span_to_string(span_end);
- try!(print_diagnostic(dst, &ses[], lvl, msg, code));
+ try!(print_diagnostic(dst, &ses[..], lvl, msg, code));
if rsp.is_full_span() {
try!(custom_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
}
} else {
- try!(print_diagnostic(dst, &ss[], lvl, msg, code));
+ try!(print_diagnostic(dst, &ss[..], lvl, msg, code));
if rsp.is_full_span() {
try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
}
Some(code) =>
match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
Some(_) => {
- try!(print_diagnostic(dst, &ss[], Help,
+ try!(print_diagnostic(dst, &ss[..], Help,
&format!("pass `--explain {}` to see a detailed \
explanation", code)[], None));
}
let fm = &*lines.file;
let mut elided = false;
- let mut display_lines = &lines.lines[];
+ let mut display_lines = &lines.lines[..];
if display_lines.len() > MAX_LINES {
display_lines = &display_lines[0..MAX_LINES];
elided = true;
-> old_io::IoResult<()> {
let fm = &*lines.file;
- let lines = &lines.lines[];
+ let lines = &lines.lines[..];
if lines.len() > MAX_LINES {
if let Some(line) = fm.get_line(lines[0]) {
try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
s.push('^');
s.push('\n');
print_maybe_styled(w,
- &s[],
+ &s[..],
term::attr::ForegroundColor(lvl.color()))
}
codemap::MacroAttribute => ("#[", "]"),
codemap::MacroBang => ("", "!")
};
- try!(print_diagnostic(w, &ss[], Note,
+ try!(print_diagnostic(w, &ss[..], Note,
&format!("in expansion of {}{}{}", pre,
ei.callee.name,
post)[], None));
let ss = cm.span_to_string(ei.call_site);
- try!(print_diagnostic(w, &ss[], Note, "expansion site", None));
+ try!(print_diagnostic(w, &ss[..], Note, "expansion site", None));
Ok(Some(ei.call_site))
}
None => Ok(None)
{
match opt {
Some(t) => t,
- None => diag.handler().bug(&msg()[]),
+ None => diag.handler().bug(&msg()),
}
}
Some(previous_span) => {
ecx.span_warn(span, &format!(
"diagnostic code {} already used", &token::get_ident(code)
- )[]);
+ ));
ecx.span_note(previous_span, "previous invocation");
},
None => ()
if !diagnostics.contains_key(&code.name) {
ecx.span_err(span, &format!(
"used diagnostic code {} not registered", &token::get_ident(code)
- )[]);
+ ));
}
});
MacExpr::new(quote_expr!(ecx, ()))
if diagnostics.insert(code.name, description).is_some() {
ecx.span_err(span, &format!(
"diagnostic code {} already registered", &token::get_ident(*code)
- )[]);
+ ));
}
});
let sym = Ident::new(token::gensym(&(
"__register_diagnostic_".to_string() + &token::get_ident(*code)
- )[]));
+ )));
MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter())
}
}
base::MacExpr::new(cx.expr_str(
sp,
- token::intern_and_get_ident(&accumulator[])))
+ token::intern_and_get_ident(&accumulator[..])))
}
}
}
}
- let res = str_to_ident(&res_str[]);
+ let res = str_to_ident(&res_str[..]);
let e = P(ast::Expr {
id: ast::DUMMY_NODE_ID,
{
let name = match mitem.node {
MetaWord(ref tname) => {
- match &tname[] {
+ match &tname[..] {
"Copy" => "Copy",
"Send" | "Sync" => {
return cx.span_err(span,
self,
struct_def,
type_ident,
- &self_args[],
- &nonself_args[])
+ &self_args[..],
+ &nonself_args[..])
} else {
method_def.expand_struct_method_body(cx,
self,
struct_def,
type_ident,
- &self_args[],
- &nonself_args[])
+ &self_args[..],
+ &nonself_args[..])
};
method_def.create_method(cx,
self,
enum_def,
type_ident,
- &self_args[],
- &nonself_args[])
+ &self_args[..],
+ &nonself_args[..])
} else {
method_def.expand_enum_method_body(cx,
self,
enum_def,
type_ident,
self_args,
- &nonself_args[])
+ &nonself_args[..])
};
method_def.create_method(cx,
.collect::<Vec<String>>();
let self_arg_idents = self_arg_names.iter()
- .map(|name|cx.ident_of(&name[]))
+ .map(|name|cx.ident_of(&name[..]))
.collect::<Vec<ast::Ident>>();
// The `vi_idents` will be bound, solely in the catch-all, to
// a series of let statements mapping each self_arg to a usize
// corresponding to its variant index.
let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
- .map(|name| { let vi_suffix = format!("{}_vi", &name[]);
- cx.ident_of(&vi_suffix[]) })
+ .map(|name| { let vi_suffix = format!("{}_vi", &name[..]);
+ cx.ident_of(&vi_suffix[..]) })
.collect::<Vec<ast::Ident>>();
// Builds, via callback to call_substructure_method, the
// delegated expression that handles the catch-all case,
// using `__variants_tuple` to drive logic if necessary.
let catch_all_substructure = EnumNonMatchingCollapsed(
- self_arg_idents, &variants[], &vi_idents[]);
+ self_arg_idents, &variants[..], &vi_idents[..]);
// These arms are of the form:
// (Variant1, Variant1, ...) => Body1
idents
};
for self_arg_name in self_arg_names.tail() {
- let (p, idents) = mk_self_pat(cx, &self_arg_name[]);
+ let (p, idents) = mk_self_pat(cx, &self_arg_name[..]);
subpats.push(p);
self_pats_idents.push(idents);
}
&**variant,
field_tuples);
let arm_expr = self.call_substructure_method(
- cx, trait_, type_ident, &self_args[], nonself_args,
+ cx, trait_, type_ident, &self_args[..], nonself_args,
&substructure);
cx.arm(sp, vec![single_pat], arm_expr)
}
let arm_expr = self.call_substructure_method(
- cx, trait_, type_ident, &self_args[], nonself_args,
+ cx, trait_, type_ident, &self_args[..], nonself_args,
&catch_all_substructure);
// Builds the expression:
}
},
EnumNonMatchingCollapsed(ref all_args, _, tuple) =>
- enum_nonmatch_f(cx, trait_span, (&all_args[], tuple),
+ enum_nonmatch_f(cx, trait_span, (&all_args[..], tuple),
substructure.nonself_args),
StaticEnum(..) | StaticStruct(..) => {
cx.span_bug(trait_span, "static function in `derive`")
f(cx, trait_span, called)
},
EnumNonMatchingCollapsed(ref all_self_args, _, tuple) =>
- enum_nonmatch_f(cx, trait_span, (&all_self_args[], tuple),
+ enum_nonmatch_f(cx, trait_span, (&all_self_args[..], tuple),
substructure.nonself_args),
StaticEnum(..) | StaticStruct(..) => {
cx.span_bug(trait_span, "static function in `derive`")
|i| push(i)))
}
- match &tname[] {
+ match &tname[..] {
"Clone" => expand!(clone::expand_deriving_clone),
"Hash" => expand!(hash::expand_deriving_hash),
let formatter = substr.nonself_args[0].clone();
let meth = cx.ident_of("write_fmt");
- let s = token::intern_and_get_ident(&format_string[]);
+ let s = token::intern_and_get_ident(&format_string[..]);
let format_string = cx.expr_str(span, s);
// phew, not our responsibility any more!
Some(v) => v
};
- let e = match env::var(&var[]) {
+ let e = match env::var(&var[..]) {
Err(..) => {
cx.expr_path(cx.path_all(sp,
true,
cx.ident_of("Some")),
vec!(cx.expr_str(sp,
token::intern_and_get_ident(
- &s[]))))
+ &s[..]))))
}
};
MacExpr::new(e)
}
}
- let e = match env::var(&var[]) {
+ let e = match env::var(&var[..]) {
Err(_) => {
cx.span_err(sp, &msg);
cx.expr_usize(sp, 0)
},
});
let fm = fresh_mark();
- let marked_before = mark_tts(&tts[], fm);
+ let marked_before = mark_tts(&tts[..], fm);
// The span that we pass to the expanders we want to
// be the root of the call stack. That's the most
let opt_parsed = {
let expanded = expandfun.expand(fld.cx,
mac_span,
- &marked_before[]);
+ &marked_before[..]);
parse_thunk(expanded)
};
let parsed = match opt_parsed {
fld.cx.span_err(
pth.span,
&format!("non-expression macro in expression position: {}",
- &extnamestr[]
+ &extnamestr[..]
)[]);
return None;
}
}
});
// mark before expansion:
- let marked_before = mark_tts(&tts[], fm);
- expander.expand(fld.cx, it.span, &marked_before[])
+ let marked_before = mark_tts(&tts[..], fm);
+ expander.expand(fld.cx, it.span, &marked_before[..])
}
IdentTT(ref expander, span) => {
if it.ident.name == parse::token::special_idents::invalid.name {
}
});
// mark before expansion:
- let marked_tts = mark_tts(&tts[], fm);
+ let marked_tts = mark_tts(&tts[..], fm);
expander.expand(fld.cx, it.span, it.ident, marked_tts)
}
MacroRulesTT => {
});
let fm = fresh_mark();
- let marked_before = mark_tts(&tts[], fm);
+ let marked_before = mark_tts(&tts[..], fm);
let mac_span = fld.cx.original_span();
let expanded = match expander.expand(fld.cx,
mac_span,
- &marked_before[]).make_pat() {
+ &marked_before[..]).make_pat() {
Some(e) => e,
None => {
fld.cx.span_err(
if valid_ident {
fld.cx.mod_push(it.ident);
}
- let macro_use = contains_macro_use(fld, &new_attrs[]);
+ let macro_use = contains_macro_use(fld, &new_attrs[..]);
let result = with_exts_frame!(fld.cx.syntax_env,
macro_use,
noop_fold_item(it, fld));
node: match node {
MacInvocTT(path, tts, ctxt) => {
MacInvocTT(self.fold_path(path),
- self.fold_tts(&tts[]),
+ self.fold_tts(&tts[..]),
mtwt::apply_mark(self.mark, ctxt))
}
},
.collect();
println!("varref #{}: {:?}, resolves to {}",idx, varref_idents, varref_name);
let string = token::get_ident(final_varref_ident);
- println!("varref's first segment's string: \"{}\"", &string[]);
+ println!("varref's first segment's string: \"{}\"", &string[..]);
println!("binding #{}: {}, resolves to {}",
binding_idx, bindings[binding_idx], binding_name);
mtwt::with_sctable(|x| mtwt::display_sctable(x));
let cxbinds: Vec<&ast::Ident> =
bindings.iter().filter(|b| {
let ident = token::get_ident(**b);
- let string = &ident[];
+ let string = &ident[..];
"xx" == string
}).collect();
- let cxbinds: &[&ast::Ident] = &cxbinds[];
+ let cxbinds: &[&ast::Ident] = &cxbinds[..];
let cxbind = match cxbinds {
[b] => b,
_ => panic!("expected just one binding for ext_cx")
}
};
let interned_name = token::get_ident(ident);
- let name = &interned_name[];
+ let name = &interned_name[..];
p.expect(&token::Eq);
let e = p.parse_expr();
let msg = format!("invalid reference to argument `{}` ({})",
arg, self.describe_num_args());
- self.ecx.span_err(self.fmtsp, &msg[]);
+ self.ecx.span_err(self.fmtsp, &msg[..]);
return;
}
{
Some(e) => e.span,
None => {
let msg = format!("there is no argument named `{}`", name);
- self.ecx.span_err(self.fmtsp, &msg[]);
+ self.ecx.span_err(self.fmtsp, &msg[..]);
return;
}
};
-> P<ast::Expr> {
let trait_ = match *ty {
Known(ref tyname) => {
- match &tyname[] {
+ match &tyname[..] {
"" => "Display",
"?" => "Debug",
"e" => "LowerExp",
for i in 0..tt.len() {
seq.push(tt.get_tt(i));
}
- mk_tts(cx, &seq[])
+ mk_tts(cx, &seq[..])
}
ast::TtToken(sp, ref tok) => {
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
let mut vector = vec!(stmt_let_sp, stmt_let_tt);
- vector.extend(mk_tts(cx, &tts[]).into_iter());
+ vector.extend(mk_tts(cx, &tts[..]).into_iter());
let block = cx.expr_block(
cx.block_all(sp,
vector,
-> Box<base::MacResult+'static> {
let s = pprust::tts_to_string(tts);
base::MacExpr::new(cx.expr_str(sp,
- token::intern_and_get_ident(&s[])))
+ token::intern_and_get_ident(&s[..])))
}
pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
.connect("::");
base::MacExpr::new(cx.expr_str(
sp,
- token::intern_and_get_ident(&string[])))
+ token::intern_and_get_ident(&string[..])))
}
/// include! : parse the given file as an expr
// Add this input file to the code map to make it available as
// dependency information
let filename = format!("{}", file.display());
- let interned = token::intern_and_get_ident(&src[]);
+ let interned = token::intern_and_get_ident(&src[..]);
cx.codemap().new_filemap(filename, src);
base::MacExpr::new(cx.expr_str(sp, interned))
pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
-> Box<MatcherPos> {
- let match_idx_hi = count_names(&ms[]);
+ let match_idx_hi = count_names(&ms[..]);
let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect();
box MatcherPos {
stack: vec![],
rdr: TtReader,
ms: Vec<TokenTree> )
-> HashMap<Ident, Rc<NamedMatch>> {
- match parse(sess, cfg, rdr, &ms[]) {
+ match parse(sess, cfg, rdr, &ms[..]) {
Success(m) => m,
Failure(sp, str) => {
- sess.span_diagnostic.span_fatal(sp, &str[])
+ sess.span_diagnostic.span_fatal(sp, &str[..])
}
Error(sp, str) => {
- sess.span_diagnostic.span_fatal(sp, &str[])
+ sess.span_diagnostic.span_fatal(sp, &str[..])
}
}
}
for dv in &mut (&mut eof_eis[0]).matches {
v.push(dv.pop().unwrap());
}
- return Success(nameize(sess, ms, &v[]));
+ return Success(nameize(sess, ms, &v[..]));
} else if eof_eis.len() > 1 {
return Error(sp, "ambiguity: multiple successful parses".to_string());
} else {
_ => {
let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}",
- &token_str[])[])
+ &token_str[..])[])
}
},
"path" => {
following",
token_str);
let span = parser.span;
- parser.span_err(span, &msg[]);
+ parser.span_err(span, &msg[..]);
}
}
}
best_fail_spot = sp;
best_fail_msg = (*msg).clone();
},
- Error(sp, ref msg) => cx.span_fatal(sp, &msg[])
+ Error(sp, ref msg) => cx.span_fatal(sp, &msg[..])
}
}
_ => cx.bug("non-matcher found in parsed lhses")
}
}
- cx.span_fatal(best_fail_spot, &best_fail_msg[]);
+ cx.span_fatal(best_fail_spot, &best_fail_msg[..]);
}
// Note that macro-by-example's input is also matched against a token tree:
}
LisContradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead
- r.sp_diag.span_fatal(sp.clone(), &msg[]);
+ r.sp_diag.span_fatal(sp.clone(), &msg[..]);
}
LisConstraint(len, _) => {
if len == 0 {
diag.span_err(span, explain);
diag.span_help(span, &format!("add #![feature({})] to the \
crate attributes to enable",
- feature)[]);
+ feature));
}
pub fn emit_feature_warn(diag: &SpanHandler, feature: &str, span: Span, explain: &str) {
if diag.handler.can_emit_warnings {
diag.span_help(span, &format!("add #![feature({})] to the \
crate attributes to silence this warning",
- feature)[]);
+ feature));
}
}
fn visit_item(&mut self, i: &ast::Item) {
match i.node {
ast::ItemExternCrate(_) => {
- if attr::contains_name(&i.attrs[], "macro_reexport") {
+ if attr::contains_name(&i.attrs[..], "macro_reexport") {
self.gate_feature("macro_reexport", i.span,
"macros reexports are experimental \
and possibly buggy");
}
ast::ItemForeignMod(ref foreign_module) => {
- if attr::contains_name(&i.attrs[], "link_args") {
+ if attr::contains_name(&i.attrs[..], "link_args") {
self.gate_feature("link_args", i.span,
"the `link_args` attribute is not portable \
across platforms, it is recommended to \
}
ast::ItemFn(..) => {
- if attr::contains_name(&i.attrs[], "plugin_registrar") {
+ if attr::contains_name(&i.attrs[..], "plugin_registrar") {
self.gate_feature("plugin_registrar", i.span,
"compiler plugins are experimental and possibly buggy");
}
- if attr::contains_name(&i.attrs[], "start") {
+ if attr::contains_name(&i.attrs[..], "start") {
self.gate_feature("start", i.span,
"a #[start] function is an experimental \
feature whose signature may change \
over time");
}
- if attr::contains_name(&i.attrs[], "main") {
+ if attr::contains_name(&i.attrs[..], "main") {
self.gate_feature("main", i.span,
"declaration of a nonstandard #[main] \
function may change over time, for now \
}
ast::ItemStruct(..) => {
- if attr::contains_name(&i.attrs[], "simd") {
+ if attr::contains_name(&i.attrs[..], "simd") {
self.gate_feature("simd", i.span,
"SIMD types are experimental and possibly buggy");
}
removed in the future");
}
- if attr::contains_name(&i.attrs[],
+ if attr::contains_name(&i.attrs[..],
"old_orphan_check") {
self.gate_feature(
"old_orphan_check",
"the new orphan check rules will eventually be strictly enforced");
}
- if attr::contains_name(&i.attrs[],
+ if attr::contains_name(&i.attrs[..],
"old_impl_check") {
self.gate_feature("old_impl_check",
i.span,
}
fn visit_foreign_item(&mut self, i: &ast::ForeignItem) {
- if attr::contains_name(&i.attrs[], "linkage") {
+ if attr::contains_name(&i.attrs, "linkage") {
self.gate_feature("linkage", i.span,
"the `linkage` attribute is experimental \
and not portable across platforms")
let line = rdr.read_one_line_comment();
debug!("{}", line);
// Doc comments are not put in comments.
- if is_doc_comment(&line[]) {
+ if is_doc_comment(&line[..]) {
break;
}
lines.push(line);
fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> ,
s: String, col: CharPos) {
let len = s.len();
- let s1 = match all_whitespace(&s[], col) {
+ let s1 = match all_whitespace(&s[..], col) {
Some(col) => {
if col < len {
(&s[col..len]).to_string()
rdr.bump();
rdr.bump();
}
- if is_block_doc_comment(&curr_line[]) {
+ if is_block_doc_comment(&curr_line[..]) {
return
}
assert!(!curr_line.contains_char('\n'));
let mut m = m.to_string();
m.push_str(": ");
for c in c.escape_default() { m.push(c) }
- self.fatal_span_(from_pos, to_pos, &m[]);
+ self.fatal_span_(from_pos, to_pos, &m[..]);
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
let mut m = m.to_string();
m.push_str(": ");
for c in c.escape_default() { m.push(c) }
- self.err_span_(from_pos, to_pos, &m[]);
+ self.err_span_(from_pos, to_pos, &m[..]);
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
let from = self.byte_offset(from_pos).to_usize();
let to = self.byte_offset(to_pos).to_usize();
m.push_str(&self.filemap.src[from..to]);
- self.fatal_span_(from_pos, to_pos, &m[]);
+ self.fatal_span_(from_pos, to_pos, &m[..]);
}
/// Advance peek_tok and peek_span to refer to the next token, and
self.translate_crlf(start_bpos, string,
"bare CR not allowed in block doc-comment")
} else { string.into_cow() };
- token::DocComment(token::intern(&string[]))
+ token::DocComment(token::intern(&string[..]))
} else {
token::Comment
};
unreachable!()
}
};
- match str::from_utf8(&bytes[]).ok() {
+ match str::from_utf8(&bytes[..]).ok() {
Some(s) => {
return string_to_filemap(sess, s.to_string(),
path.as_str().unwrap().to_string())
}
let msg = format!("lexer should have rejected a bad character escape {}", lit);
- let msg2 = &msg[];
+ let msg2 = &msg[..];
fn esc(len: usize, lit: &str) -> Option<(char, isize)> {
num::from_str_radix(&lit[2..len], 16).ok()
// s can only be ascii, byte indexing is fine
let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
- let mut s = &s2[];
+ let mut s = &s2[..];
debug!("integer_lit: {}, {:?}", s, suffix);
#[test]
fn string_to_tts_macro () {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
- let tts: &[ast::TokenTree] = &tts[];
+ let tts: &[ast::TokenTree] = &tts[..];
match tts {
[ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)),
ast::TtToken(_, token::Not),
let use_s = "use foo::bar::baz;";
let vitem = string_to_item(use_s.to_string()).unwrap();
let vitem_s = item_to_string(&*vitem);
- assert_eq!(&vitem_s[], use_s);
+ assert_eq!(&vitem_s[..], use_s);
let use_s = "use foo::bar as baz;";
let vitem = string_to_item(use_s.to_string()).unwrap();
let vitem_s = item_to_string(&*vitem);
- assert_eq!(&vitem_s[], use_s);
+ assert_eq!(&vitem_s[..], use_s);
}
#[test] fn parse_extern_crate() {
let ex_s = "extern crate foo;";
let vitem = string_to_item(ex_s.to_string()).unwrap();
let vitem_s = item_to_string(&*vitem);
- assert_eq!(&vitem_s[], ex_s);
+ assert_eq!(&vitem_s[..], ex_s);
let ex_s = "extern crate \"foo\" as bar;";
let vitem = string_to_item(ex_s.to_string()).unwrap();
let vitem_s = item_to_string(&*vitem);
- assert_eq!(&vitem_s[], ex_s);
+ assert_eq!(&vitem_s[..], ex_s);
}
fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
let source = "/// doc comment\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
- assert_eq!(&doc[], "/// doc comment");
+ assert_eq!(&doc[..], "/// doc comment");
let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap();
let docs = item.attrs.iter().filter(|a| &a.name()[] == "doc")
.map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
- assert_eq!(&docs[], b);
+ assert_eq!(&docs[..], b);
let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
- assert_eq!(&doc[], "/** doc comment\n * with CRLF */");
+ assert_eq!(&doc[..], "/** doc comment\n * with CRLF */");
}
#[test]
let span = tts.iter().rev().next().unwrap().get_span();
match sess.span_diagnostic.cm.span_to_snippet(span) {
- Ok(s) => assert_eq!(&s[], "{ body }"),
+ Ok(s) => assert_eq!(&s[..], "{ body }"),
Err(_) => panic!("could not get snippet"),
}
}
sp: Span,
kind: ObsoleteSyntax,
kind_str: &str,
- desc: &str);
+ desc: &str,
+ error: bool);
fn is_obsolete_ident(&mut self, ident: &str) -> bool;
fn eat_obsolete_ident(&mut self, ident: &str) -> bool;
}
),
ObsoleteSyntax::ClosureType => (
"`|usize| -> bool` closure type",
- "use unboxed closures instead, no type annotation needed"
+ "use unboxed closures instead, no type annotation needed",
true,
),
ObsoleteSyntax::ClosureKind => (
"`:`, `&mut:`, or `&:`",
- "rely on inference instead"
+ "rely on inference instead",
true,
),
ObsoleteSyntax::Sized => (
"`Sized? T` for removing the `Sized` bound",
- "write `T: ?Sized` instead"
+ "write `T: ?Sized` instead",
true,
),
ObsoleteSyntax::EmptyIndex => (
let token_str = Parser::token_to_string(t);
let last_span = self.last_span;
self.span_fatal(last_span, &format!("unexpected token: `{}`",
- token_str)[]);
+ token_str));
}
pub fn unexpected(&mut self) -> ! {
let this_token_str = self.this_token_to_string();
self.fatal(&format!("expected `{}`, found `{}`",
token_str,
- this_token_str)[])
+ this_token_str))
}
} else {
self.expect_one_of(slice::ref_slice(t), &[]);
expected.push_all(&*self.expected_tokens);
expected.sort_by(|a, b| a.to_string().cmp(&b.to_string()));
expected.dedup();
- let expect = tokens_to_string(&expected[]);
+ let expect = tokens_to_string(&expected[..]);
let actual = self.this_token_to_string();
self.fatal(
&(if expected.len() > 1 {
(format!("expected {}, found `{}`",
expect,
actual))
- }[])
+ })[..]
)
}
}
// might be unit-struct construction; check for recoverableinput error.
let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
expected.push_all(inedible);
- self.check_for_erroneous_unit_struct_expecting(&expected[]);
+ self.check_for_erroneous_unit_struct_expecting(&expected[..]);
}
self.expect_one_of(edible, inedible)
}
.as_ref()
.map_or(false, |t| t.is_ident() || t.is_path()) {
let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
- expected.push_all(&inedible[]);
+ expected.push_all(&inedible[..]);
self.check_for_erroneous_unit_struct_expecting(
- &expected[]);
+ &expected[..]);
}
self.expect_one_of(edible, inedible)
}
_ => {
let token_str = self.this_token_to_string();
self.fatal(&format!("expected ident, found `{}`",
- token_str)[])
+ token_str))
}
}
}
let span = self.span;
self.span_err(span,
&format!("expected identifier, found keyword `{}`",
- token_str)[]);
+ token_str));
}
}
if self.token.is_reserved_keyword() {
let token_str = self.this_token_to_string();
self.fatal(&format!("`{}` is a reserved keyword",
- token_str)[])
+ token_str))
}
}
let this_token_str = self.this_token_to_string();
self.fatal(&format!("expected `{}`, found `{}`",
gt_str,
- this_token_str)[])
+ this_token_str))
}
}
}
let (inner_attrs, body) =
p.parse_inner_attrs_and_block();
let mut attrs = attrs;
- attrs.push_all(&inner_attrs[]);
+ attrs.push_all(&inner_attrs[..]);
ProvidedMethod(P(ast::Method {
attrs: attrs,
id: ast::DUMMY_NODE_ID,
_ => {
let token_str = p.this_token_to_string();
p.fatal(&format!("expected `;` or `{{`, found `{}`",
- token_str)[])
+ token_str)[..])
}
}
}
} else {
let this_token_str = self.this_token_to_string();
let msg = format!("expected type, found `{}`", this_token_str);
- self.fatal(&msg[]);
+ self.fatal(&msg[..]);
};
let sp = mk_sp(lo, self.last_span.hi);
token::StrRaw(s, n) => {
(true,
LitStr(
- token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())[]),
+ token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())),
ast::RawStr(n)))
}
token::Binary(i) =>
};
}
_ => {
- self.fatal(&format!("expected a lifetime name")[]);
+ self.fatal(&format!("expected a lifetime name"));
}
}
}
let msg = format!("expected `,` or `>` after lifetime \
name, found `{}`",
this_token_str);
- self.fatal(&msg[]);
+ self.fatal(&msg[..]);
}
}
}
let last_span = self.last_span;
let fstr = n.as_str();
self.span_err(last_span,
- &format!("unexpected token: `{}`", n.as_str())[]);
+ &format!("unexpected token: `{}`", n.as_str()));
if fstr.chars().all(|x| "0123456789.".contains_char(x)) {
let float = match fstr.parse::<f64>().ok() {
Some(f) => f,
self.span_help(last_span,
&format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
float.trunc() as usize,
- &float.fract().to_string()[1..])[]);
+ &float.fract().to_string()[1..]));
}
self.abort_if_errors();
match self.token {
token::SubstNt(name, _) =>
self.fatal(&format!("unknown macro variable `{}`",
- token::get_ident(name))[]),
+ token::get_ident(name))),
_ => {}
}
}
};
let token_str = p.this_token_to_string();
p.fatal(&format!("incorrect close delimiter: `{}`",
- token_str)[])
+ token_str))
},
/* we ought to allow different depths of unquotation */
token::Dollar | token::SubstNt(..) if p.quote_depth > 0 => {
let this_token_to_string = self.this_token_to_string();
self.span_err(span,
&format!("expected expression, found `{}`",
- this_token_to_string)[]);
+ this_token_to_string));
let box_span = mk_sp(lo, self.last_span.hi);
self.span_help(box_span,
"perhaps you meant `box() (foo)` instead?");
if self.token != token::CloseDelim(token::Brace) {
let token_str = self.this_token_to_string();
self.fatal(&format!("expected `{}`, found `{}`", "}",
- token_str)[])
+ token_str))
}
etc = true;
break;
let span = self.span;
let tok_str = self.this_token_to_string();
self.span_fatal(span,
- &format!("expected identifier, found `{}`", tok_str)[]);
+ &format!("expected identifier, found `{}`", tok_str));
}
let ident = self.parse_ident();
let last_span = self.last_span;
let lo = self.span.lo;
if self.check_keyword(keywords::Let) {
- check_expected_item(self, &item_attrs[]);
+ check_expected_item(self, &item_attrs[..]);
self.expect_keyword(keywords::Let);
let decl = self.parse_let();
P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID)))
&& self.look_ahead(1, |t| *t == token::Not) {
// it's a macro invocation:
- check_expected_item(self, &item_attrs[]);
+ check_expected_item(self, &item_attrs[..]);
// Potential trouble: if we allow macros with paths instead of
// idents, we'd need to look ahead past the whole path here...
let tok_str = self.this_token_to_string();
self.fatal(&format!("expected {}`(` or `{{`, found `{}`",
ident_str,
- tok_str)[])
+ tok_str))
},
};
}
} else {
let found_attrs = !item_attrs.is_empty();
- let item_err = Parser::expected_item_err(&item_attrs[]);
+ let item_err = Parser::expected_item_err(&item_attrs[..]);
match self.parse_item_(item_attrs, false) {
Ok(i) => {
let hi = i.span.hi;
let sp = self.span;
let tok = self.this_token_to_string();
self.span_fatal_help(sp,
- &format!("expected `{{`, found `{}`", tok)[],
+ &format!("expected `{{`, found `{}`", tok),
"place this code inside a block");
}
while self.token != token::CloseDelim(token::Brace) {
// parsing items even when they're not allowed lets us give
// better error messages and recover more gracefully.
- attributes_box.push_all(&self.parse_outer_attributes()[]);
+ attributes_box.push_all(&self.parse_outer_attributes());
match self.token {
token::Semi => {
if !attributes_box.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
- Parser::expected_item_err(&attributes_box[]));
+ Parser::expected_item_err(&attributes_box[..]));
attributes_box = Vec::new();
}
self.bump(); // empty
if !attributes_box.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
- Parser::expected_item_err(&attributes_box[]));
+ Parser::expected_item_err(&attributes_box[..]));
}
let hi = self.span.hi;
_ => {
let token_str = self.this_token_to_string();
self.fatal(&format!("expected `self`, found `{}`",
- token_str)[])
+ token_str))
}
}
}
let (inner_attrs, body) = self.parse_inner_attrs_and_block();
let body_span = body.span;
let mut new_attrs = attrs;
- new_attrs.push_all(&inner_attrs[]);
+ new_attrs.push_all(&inner_attrs[..]);
(ast::MethDecl(ident,
generics,
abi,
// We parsed attributes for the first item but didn't find it
let last_span = self.last_span;
self.span_err(last_span,
- Parser::expected_item_err(&attrs[]));
+ Parser::expected_item_err(&attrs[..]));
}
ast::Mod {
let mod_name = mod_string.to_string();
let default_path_str = format!("{}.rs", mod_name);
let secondary_path_str = format!("{}/mod.rs", mod_name);
- let default_path = dir_path.join(&default_path_str[]);
- let secondary_path = dir_path.join(&secondary_path_str[]);
+ let default_path = dir_path.join(&default_path_str[..]);
+ let secondary_path = dir_path.join(&secondary_path_str[..]);
let default_exists = default_path.exists();
let secondary_exists = secondary_path.exists();
err.push_str(" -> ");
}
err.push_str(&path.display().as_cow()[]);
- self.span_fatal(id_sp, &err[]);
+ self.span_fatal(id_sp, &err[..]);
}
None => ()
}
if self.eat_keyword(keywords::Mod) {
// MODULE ITEM
let (ident, item_, extra_attrs) =
- self.parse_item_mod(&attrs[]);
+ self.parse_item_mod(&attrs[..]);
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
if !attrs.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
- Parser::expected_item_err(&attrs[]));
+ Parser::expected_item_err(&attrs[..]));
}
foreign_items
$(init_vec.push($si_str);)*
$(init_vec.push($sk_str);)*
$(init_vec.push($rk_str);)*
- interner::StrInterner::prefill(&init_vec[])
+ interner::StrInterner::prefill(&init_vec[..])
}
}}
// of `BytesContainer`, which is itself a workaround for the lack of
// DST.
unsafe {
- let this = &self[];
+ let this = &self[..];
mem::transmute::<&[u8],&[u8]>(this.container_as_bytes())
}
}
assert_eq!(l, len);
// assert!(l <= space);
self.space -= len;
- self.print_str(&s[])
+ self.print_str(&s[..])
}
Token::Eof => {
// Eof should never get here.
pub fn synth_comment(&mut self, text: String) -> IoResult<()> {
try!(word(&mut self.s, "/*"));
try!(space(&mut self.s));
- try!(word(&mut self.s, &text[]));
+ try!(word(&mut self.s, &text[..]));
try!(space(&mut self.s));
word(&mut self.s, "*/")
}
}
ast::TyTup(ref elts) => {
try!(self.popen());
- try!(self.commasep(Inconsistent, &elts[],
+ try!(self.commasep(Inconsistent, &elts[..],
|s, ty| s.print_type(&**ty)));
if elts.len() == 1 {
try!(word(&mut self.s, ","));
}
ast::TyObjectSum(ref ty, ref bounds) => {
try!(self.print_type(&**ty));
- try!(self.print_bounds("+", &bounds[]));
+ try!(self.print_bounds("+", &bounds[..]));
}
ast::TyPolyTraitRef(ref bounds) => {
- try!(self.print_bounds("", &bounds[]));
+ try!(self.print_bounds("", &bounds[..]));
}
ast::TyQPath(ref qpath) => {
try!(self.print_qpath(&**qpath, false))
real_bounds.push(b);
}
}
- try!(self.print_bounds(":", &real_bounds[]));
+ try!(self.print_bounds(":", &real_bounds[..]));
try!(self.print_where_clause(generics));
try!(word(&mut self.s, " "));
try!(self.bopen());
try!(self.print_ident(item.ident));
try!(self.cbox(indent_unit));
try!(self.popen());
- try!(self.print_tts(&tts[]));
+ try!(self.print_tts(&tts[..]));
try!(self.pclose());
try!(word(&mut self.s, ";"));
try!(self.end());
if !args.is_empty() {
try!(self.popen());
try!(self.commasep(Consistent,
- &args[],
+ &args[..],
|s, arg| s.print_type(&*arg.ty)));
try!(self.pclose());
}
try!(word(&mut self.s, "! "));
try!(self.cbox(indent_unit));
try!(self.popen());
- try!(self.print_tts(&tts[]));
+ try!(self.print_tts(&tts[..]));
try!(self.pclose());
try!(word(&mut self.s, ";"));
self.end()
fn print_expr_vec(&mut self, exprs: &[P<ast::Expr>]) -> IoResult<()> {
try!(self.ibox(indent_unit));
try!(word(&mut self.s, "["));
- try!(self.commasep_exprs(Inconsistent, &exprs[]));
+ try!(self.commasep_exprs(Inconsistent, &exprs[..]));
try!(word(&mut self.s, "]"));
self.end()
}
try!(word(&mut self.s, "{"));
try!(self.commasep_cmnt(
Consistent,
- &fields[],
+ &fields[..],
|s, field| {
try!(s.ibox(indent_unit));
try!(s.print_ident(field.ident.node));
fn print_expr_tup(&mut self, exprs: &[P<ast::Expr>]) -> IoResult<()> {
try!(self.popen());
- try!(self.commasep_exprs(Inconsistent, &exprs[]));
+ try!(self.commasep_exprs(Inconsistent, &exprs[..]));
if exprs.len() == 1 {
try!(word(&mut self.s, ","));
}
try!(self.print_expr_box(place, &**expr));
}
ast::ExprVec(ref exprs) => {
- try!(self.print_expr_vec(&exprs[]));
+ try!(self.print_expr_vec(&exprs[..]));
}
ast::ExprRepeat(ref element, ref count) => {
try!(self.print_expr_repeat(&**element, &**count));
}
ast::ExprStruct(ref path, ref fields, ref wth) => {
- try!(self.print_expr_struct(path, &fields[], wth));
+ try!(self.print_expr_struct(path, &fields[..], wth));
}
ast::ExprTup(ref exprs) => {
- try!(self.print_expr_tup(&exprs[]));
+ try!(self.print_expr_tup(&exprs[..]));
}
ast::ExprCall(ref func, ref args) => {
- try!(self.print_expr_call(&**func, &args[]));
+ try!(self.print_expr_call(&**func, &args[..]));
}
ast::ExprMethodCall(ident, ref tys, ref args) => {
- try!(self.print_expr_method_call(ident, &tys[], &args[]));
+ try!(self.print_expr_method_call(ident, &tys[..], &args[..]));
}
ast::ExprBinary(op, ref lhs, ref rhs) => {
try!(self.print_expr_binary(op, &**lhs, &**rhs));
pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> {
if self.encode_idents_with_hygiene {
let encoded = ident.encode_with_hygiene();
- try!(word(&mut self.s, &encoded[]))
+ try!(word(&mut self.s, &encoded[..]))
} else {
try!(word(&mut self.s, &token::get_ident(ident)))
}
Some(ref args) => {
if !args.is_empty() {
try!(self.popen());
- try!(self.commasep(Inconsistent, &args[],
+ try!(self.commasep(Inconsistent, &args[..],
|s, p| s.print_pat(&**p)));
try!(self.pclose());
}
try!(self.nbsp());
try!(self.word_space("{"));
try!(self.commasep_cmnt(
- Consistent, &fields[],
+ Consistent, &fields[..],
|s, f| {
try!(s.cbox(indent_unit));
if !f.node.is_shorthand {
ast::PatTup(ref elts) => {
try!(self.popen());
try!(self.commasep(Inconsistent,
- &elts[],
+ &elts[..],
|s, p| s.print_pat(&**p)));
if elts.len() == 1 {
try!(word(&mut self.s, ","));
ast::PatVec(ref before, ref slice, ref after) => {
try!(word(&mut self.s, "["));
try!(self.commasep(Inconsistent,
- &before[],
+ &before[..],
|s, p| s.print_pat(&**p)));
if let Some(ref p) = *slice {
if !before.is_empty() { try!(self.word_space(",")); }
if !after.is_empty() { try!(self.word_space(",")); }
}
try!(self.commasep(Inconsistent,
- &after[],
+ &after[..],
|s, p| s.print_pat(&**p)));
try!(word(&mut self.s, "]"));
}
ints.push(i);
}
- try!(self.commasep(Inconsistent, &ints[], |s, &idx| {
+ try!(self.commasep(Inconsistent, &ints[..], |s, &idx| {
if idx < generics.lifetimes.len() {
let lifetime = &generics.lifetimes[idx];
s.print_lifetime_def(lifetime)
try!(word(&mut self.s, &name));
}
ast::MetaNameValue(ref name, ref value) => {
- try!(self.word_space(&name[]));
+ try!(self.word_space(&name[..]));
try!(self.word_space("="));
try!(self.print_literal(value));
}
try!(word(&mut self.s, &name));
try!(self.popen());
try!(self.commasep(Consistent,
- &items[],
+ &items[..],
|s, i| s.print_meta_item(&**i)));
try!(self.pclose());
}
try!(self.print_path(path, false));
try!(word(&mut self.s, "::{"));
}
- try!(self.commasep(Inconsistent, &idents[], |s, w| {
+ try!(self.commasep(Inconsistent, &idents[..], |s, w| {
match w.node {
ast::PathListIdent { name, .. } => {
s.print_ident(name)
let mut res = String::from_str("b'");
res.extend(ascii::escape_default(byte).map(|c| c as char));
res.push('\'');
- word(&mut self.s, &res[])
+ word(&mut self.s, &res[..])
}
ast::LitChar(ch) => {
let mut res = String::from_str("'");
res.extend(ch.escape_default());
res.push('\'');
- word(&mut self.s, &res[])
+ word(&mut self.s, &res[..])
}
ast::LitInt(i, t) => {
match t {
&f,
&ast_util::float_ty_to_string(t)[])[])
}
- ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, &f[]),
+ ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, &f[..]),
ast::LitBool(val) => {
if val { word(&mut self.s, "true") } else { word(&mut self.s, "false") }
}
// Don't print empty lines because they will end up as trailing
// whitespace
if !line.is_empty() {
- try!(word(&mut self.s, &line[]));
+ try!(word(&mut self.s, &line[..]));
}
try!(hardbreak(&mut self.s));
}
try!(self.ibox(0));
for line in &cmnt.lines {
if !line.is_empty() {
- try!(word(&mut self.s, &line[]));
+ try!(word(&mut self.s, &line[..]));
}
try!(hardbreak(&mut self.s));
}
string=st))
}
};
- word(&mut self.s, &st[])
+ word(&mut self.s, &st[..])
}
pub fn next_comment(&mut self) -> Option<comments::Comment> {
// The name to use in `extern crate "name" as std;`
let actual_crate_name = match self.alt_std_name {
- Some(ref s) => token::intern_and_get_ident(&s[]),
+ Some(ref s) => token::intern_and_get_ident(&s[..]),
None => token::intern_and_get_ident("std"),
};
// When not compiling with --test we should not compile the
// #[test] functions
config::strip_items(krate, |attrs| {
- !attr::contains_name(&attrs[], "test") &&
- !attr::contains_name(&attrs[], "bench")
+ !attr::contains_name(&attrs[..], "test") &&
+ !attr::contains_name(&attrs[..], "bench")
})
}
fn is_test_crate(krate: &ast::Crate) -> bool {
match attr::find_crate_name(&krate.attrs[]) {
- Some(ref s) if "test" == &s[] => true,
+ Some(ref s) if "test" == &s[..] => true,
_ => false
}
}
// creates $name: $expr
let field = |name, expr| ecx.field_imm(span, ecx.ident_of(name), expr);
- debug!("encoding {}", ast_util::path_name_i(&path[]));
+ debug!("encoding {}", ast_util::path_name_i(&path[..]));
// path to the #[test] function: "foo::bar::baz"
- let path_string = ast_util::path_name_i(&path[]);
- let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[]));
+ let path_string = ast_util::path_name_i(&path[..]);
+ let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[..]));
// self::test::StaticTestName($name_expr)
let name_expr = ecx.expr_call(span,
impl Ord for RcStr {
fn cmp(&self, other: &RcStr) -> Ordering {
- self[].cmp(&other[])
+ self[..].cmp(&other[..])
}
}
impl fmt::Debug for RcStr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Debug;
- self[].fmt(f)
+ self[..].fmt(f)
}
}
impl fmt::Display for RcStr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Display;
- self[].fmt(f)
+ self[..].fmt(f)
}
}
impl BorrowFrom<RcStr> for str {
fn borrow_from(owned: &RcStr) -> &str {
- &owned.string[]
+ &owned.string[..]
}
}
impl Deref for RcStr {
type Target = str;
- fn deref(&self) -> &str { &self.string[] }
+ fn deref(&self) -> &str { &self.string[..] }
}
/// A StrInterner differs from Interner<String> in that it accepts
}
};
- let entry = open(&term[]);
+ let entry = open(&term[..]);
if entry.is_err() {
if env::var("MSYSCON").ok().map_or(false, |s| {
"mintty.exe" == s
for p in &dirs_to_search {
if p.exists() {
let f = first_char.to_string();
- let newp = p.join_many(&[&f[], term]);
+ let newp = p.join_many(&[&f[..], term]);
if newp.exists() {
return Some(box newp);
}
// on some installations the dir is named after the hex of the char (e.g. OS X)
let f = format!("{:x}", first_char as uint);
- let newp = p.join_many(&[&f[], term]);
+ let newp = p.join_many(&[&f[..], term]);
if newp.exists() {
return Some(box newp);
}
st.write_failures().unwrap();
let s = match st.out {
- Raw(ref m) => String::from_utf8_lossy(&m[]),
+ Raw(ref m) => String::from_utf8_lossy(&m[..]),
Pretty(_) => unreachable!()
};
None => filtered,
Some(ref filter) => {
filtered.into_iter().filter(|test| {
- test.desc.name.as_slice().contains(&filter[])
+ test.desc.name.as_slice().contains(&filter[..])
}).collect()
}
};
{
let urls = markdown_data.replace(".md)", ".html)");
try!(File::create(&preprocessed_path)
- .write_str(&urls[]));
+ .write_str(&urls[..]));
}
// write the prelude to a temporary HTML file for rustdoc inclusion
impl Error for String {
fn description<'a>(&'a self) -> &'a str {
- &self[]
+ &self[..]
}
}
self.desc
}
fn detail(&self) -> Option<&str> {
- self.detail.as_ref().map(|s| &s[])
+ self.detail.as_ref().map(|s| &s[..])
}
}
}
Err(errors) => {
for err in errors {
- term.err(&err[]);
+ term.err(&err[..]);
}
return Err(box "There was an error." as Box<Error>);
}
fn check_item(&mut self, cx: &Context, it: &ast::Item) {
let name = token::get_ident(it.ident);
- if &name[] == "lintme" {
+ if &name[..] == "lintme" {
cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
- } else if &name[] == "pleaselintme" {
+ } else if &name[..] == "pleaselintme" {
cx.span_lint(PLEASE_LINT, it.span, "item is named 'pleaselintme'");
}
}
fn check_item(&mut self, cx: &Context, it: &ast::Item) {
let name = token::get_ident(it.ident);
- if &name[] == "lintme" {
+ if &name[..] == "lintme" {
cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
}
}
_: &[ast::TokenTree]) -> Box<MacResult+'cx> {
let args = self.args.iter().map(|i| pprust::meta_item_to_string(&*i))
.collect::<Vec<_>>().connect(", ");
- let interned = token::intern_and_get_ident(&args[]);
+ let interned = token::intern_and_get_ident(&args[..]);
MacExpr::new(ecx.expr_str(sp, interned))
}
}
#[cfg(cannot_use_this_yet)]
fn foo<'a>(map: RefCell<HashMap<&'static str, &'a [u8]>>) {
let one = [1u];
- assert_eq!(map.borrow().get("one"), Some(&one[]));
+ assert_eq!(map.borrow().get("one"), Some(&one[..]));
}
#[cfg(cannot_use_this_yet_either)]
let one = [1u8];
let two = [2u8];
let mut map = HashMap::new();
- map.insert("zero", &zer[]);
- map.insert("one", &one[]);
- map.insert("two", &two[]);
+ map.insert("zero", &zer[..]);
+ map.insert("one", &one[..]);
+ map.insert("two", &two[..]);
let map = RefCell::new(map);
foo(map);
}