From: Jorge Aparicio Date: Wed, 7 Jan 2015 16:58:31 +0000 (-0500) Subject: use slicing sugar X-Git-Url: https://git.lizzy.rs/?a=commitdiff_plain;h=517f1cc63c1a5df148fdeef56791f66771d3d8e8;p=rust.git use slicing sugar --- diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs index f8e2ba4828f..5de93c52029 100644 --- a/src/compiletest/runtest.rs +++ b/src/compiletest/runtest.rs @@ -908,8 +908,7 @@ fn check_error_patterns(props: &TestProps, } if done { return; } - let missing_patterns = - props.error_patterns.index(&(next_err_idx..)); + let missing_patterns = &props.error_patterns[next_err_idx..]; if missing_patterns.len() == 1u { fatal_proc_rec(format!("error pattern '{}' not found!", missing_patterns[0]).as_slice(), diff --git a/src/libcollections/bit.rs b/src/libcollections/bit.rs index 2154d06377a..7d6de7c1c46 100644 --- a/src/libcollections/bit.rs +++ b/src/libcollections/bit.rs @@ -330,7 +330,7 @@ pub fn from_bytes(bytes: &[u8]) -> Bitv { if extra_bytes > 0 { let mut last_word = 0u32; - for (i, &byte) in bytes.index(&((complete_words*4)..)).iter().enumerate() { + for (i, &byte) in bytes[(complete_words*4)..].iter().enumerate() { last_word |= (reverse_bits(byte) as u32) << (i * 8); } bitv.storage.push(last_word); diff --git a/src/libcollections/ring_buf.rs b/src/libcollections/ring_buf.rs index 42c17136a08..adb7020c7ae 100644 --- a/src/libcollections/ring_buf.rs +++ b/src/libcollections/ring_buf.rs @@ -556,7 +556,7 @@ pub fn as_slices<'a>(&'a self) -> (&'a [T], &'a [T]) { let buf = self.buffer_as_slice(); if contiguous { let (empty, buf) = buf.split_at(0); - (buf.index(&(self.tail..self.head)), empty) + (&buf[self.tail..self.head], empty) } else { let (mid, right) = buf.split_at(self.tail); let (left, _) = mid.split_at(self.head); diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index 61309f0cc0f..fdc551f5b19 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -55,7 +55,7 @@ //! #![feature(slicing_syntax)] //! fn main() { //! let numbers = [0i, 1i, 2i]; -//! let last_numbers = numbers.index(&(1..3)); +//! let last_numbers = &numbers[1..3]; //! // last_numbers is now &[1i, 2i] //! } //! ``` @@ -98,7 +98,7 @@ use core::marker::Sized; use core::mem::size_of; use core::mem; -use core::ops::{FnMut, FullRange, Index, IndexMut}; +use core::ops::{FnMut, FullRange}; use core::option::Option::{self, Some, None}; use core::ptr::PtrExt; use core::ptr; @@ -1065,12 +1065,12 @@ pub fn new(length: uint) -> ElementSwaps { #[unstable = "trait is unstable"] impl BorrowFrom> for [T] { - fn borrow_from(owned: &Vec) -> &[T] { owned.index(&FullRange) } + fn borrow_from(owned: &Vec) -> &[T] { &owned[] } } #[unstable = "trait is unstable"] impl BorrowFromMut> for [T] { - fn borrow_from_mut(owned: &mut Vec) -> &mut [T] { owned.index_mut(&FullRange) } + fn borrow_from_mut(owned: &mut Vec) -> &mut [T] { &mut owned[] } } #[unstable = "trait is unstable"] @@ -1400,7 +1400,6 @@ mod tests { use core::prelude::{Ord, FullRange}; use core::default::Default; use core::mem; - use core::ops::Index; use std::iter::RandomAccessIterator; use std::rand::{Rng, thread_rng}; use std::rc::Rc; @@ -1611,7 +1610,7 @@ fn test_slice() { // Test on stack. 
let vec_stack: &[_] = &[1i, 2, 3]; - let v_b = vec_stack.index(&(1u..3u)).to_vec(); + let v_b = vec_stack[1u..3u].to_vec(); assert_eq!(v_b.len(), 2u); let v_b = v_b.as_slice(); assert_eq!(v_b[0], 2); @@ -1619,7 +1618,7 @@ fn test_slice() { // Test `Box<[T]>` let vec_unique = vec![1i, 2, 3, 4, 5, 6]; - let v_d = vec_unique.index(&(1u..6u)).to_vec(); + let v_d = vec_unique[1u..6u].to_vec(); assert_eq!(v_d.len(), 5u); let v_d = v_d.as_slice(); assert_eq!(v_d[0], 2); @@ -1632,21 +1631,21 @@ fn test_slice() { #[test] fn test_slice_from() { let vec: &[int] = &[1, 2, 3, 4]; - assert_eq!(vec.index(&(0..)), vec); + assert_eq!(&vec[0..], vec); let b: &[int] = &[3, 4]; - assert_eq!(vec.index(&(2..)), b); + assert_eq!(&vec[2..], b); let b: &[int] = &[]; - assert_eq!(vec.index(&(4..)), b); + assert_eq!(&vec[4..], b); } #[test] fn test_slice_to() { let vec: &[int] = &[1, 2, 3, 4]; - assert_eq!(vec.index(&(0..4)), vec); + assert_eq!(&vec[0..4], vec); let b: &[int] = &[1, 2]; - assert_eq!(vec.index(&(0..2)), b); + assert_eq!(&vec[0..2], b); let b: &[int] = &[]; - assert_eq!(vec.index(&(0..0)), b); + assert_eq!(&vec[0..0], b); } @@ -2572,7 +2571,7 @@ fn test_iter_zero_sized() { } assert_eq!(cnt, 3); - for f in v.index(&(1..3)).iter() { + for f in v[1..3].iter() { assert!(*f == Foo); cnt += 1; } diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index 09d140067f4..ccf654ac0a0 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -386,7 +386,7 @@ macro_rules! utf8_acc_cont_byte { #[unstable = "trait is unstable"] impl BorrowFrom for str { - fn borrow_from(owned: &String) -> &str { owned.index(&FullRange) } + fn borrow_from(owned: &String) -> &str { &owned[] } } #[unstable = "trait is unstable"] @@ -464,7 +464,7 @@ fn replace(&self, from: &str, to: &str) -> String { #[unstable = "this functionality may be moved to libunicode"] fn nfd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { - iter: self.index(&FullRange).chars(), + iter: self[].chars(), buffer: Vec::new(), sorted: false, kind: Canonical @@ -477,7 +477,7 @@ fn nfd_chars<'a>(&'a self) -> Decompositions<'a> { #[unstable = "this functionality may be moved to libunicode"] fn nfkd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { - iter: self.index(&FullRange).chars(), + iter: self[].chars(), buffer: Vec::new(), sorted: false, kind: Compatible @@ -525,7 +525,7 @@ fn nfkc_chars<'a>(&'a self) -> Recompositions<'a> { /// ``` #[stable] fn contains(&self, pat: &str) -> bool { - core_str::StrExt::contains(self.index(&FullRange), pat) + core_str::StrExt::contains(&self[], pat) } /// Returns true if a string contains a char pattern. @@ -541,7 +541,7 @@ fn contains(&self, pat: &str) -> bool { /// ``` #[unstable = "might get removed in favour of a more generic contains()"] fn contains_char(&self, pat: P) -> bool { - core_str::StrExt::contains_char(self.index(&FullRange), pat) + core_str::StrExt::contains_char(&self[], pat) } /// An iterator over the characters of `self`. Note, this iterates @@ -555,7 +555,7 @@ fn contains_char(&self, pat: P) -> bool { /// ``` #[stable] fn chars(&self) -> Chars { - core_str::StrExt::chars(self.index(&FullRange)) + core_str::StrExt::chars(&self[]) } /// An iterator over the bytes of `self` @@ -568,13 +568,13 @@ fn chars(&self) -> Chars { /// ``` #[stable] fn bytes(&self) -> Bytes { - core_str::StrExt::bytes(self.index(&FullRange)) + core_str::StrExt::bytes(&self[]) } /// An iterator over the characters of `self` and their byte offsets. 
#[stable] fn char_indices(&self) -> CharIndices { - core_str::StrExt::char_indices(self.index(&FullRange)) + core_str::StrExt::char_indices(&self[]) } /// An iterator over substrings of `self`, separated by characters @@ -597,7 +597,7 @@ fn char_indices(&self) -> CharIndices { /// ``` #[stable] fn split(&self, pat: P) -> Split
<P>
{ - core_str::StrExt::split(self.index(&FullRange), pat) + core_str::StrExt::split(&self[], pat) } /// An iterator over substrings of `self`, separated by characters @@ -624,7 +624,7 @@ fn split(&self, pat: P) -> Split
<P>
{ /// ``` #[stable] fn splitn(&self, count: uint, pat: P) -> SplitN
<P>
{ - core_str::StrExt::splitn(self.index(&FullRange), count, pat) + core_str::StrExt::splitn(&self[], count, pat) } /// An iterator over substrings of `self`, separated by characters @@ -653,7 +653,7 @@ fn splitn(&self, count: uint, pat: P) -> SplitN
<P>
{ /// ``` #[unstable = "might get removed"] fn split_terminator(&self, pat: P) -> SplitTerminator
<P>
{ - core_str::StrExt::split_terminator(self.index(&FullRange), pat) + core_str::StrExt::split_terminator(&self[], pat) } /// An iterator over substrings of `self`, separated by characters @@ -674,7 +674,7 @@ fn split_terminator(&self, pat: P) -> SplitTerminator
<P>
{ /// ``` #[stable] fn rsplitn(&self, count: uint, pat: P) -> RSplitN
<P>
{ - core_str::StrExt::rsplitn(self.index(&FullRange), count, pat) + core_str::StrExt::rsplitn(&self[], count, pat) } /// An iterator over the start and end indices of the disjoint @@ -699,7 +699,7 @@ fn rsplitn(&self, count: uint, pat: P) -> RSplitN
<P>
{ /// ``` #[unstable = "might have its iterator type changed"] fn match_indices<'a>(&'a self, pat: &'a str) -> MatchIndices<'a> { - core_str::StrExt::match_indices(self.index(&FullRange), pat) + core_str::StrExt::match_indices(&self[], pat) } /// An iterator over the substrings of `self` separated by the pattern `sep`. @@ -715,7 +715,7 @@ fn match_indices<'a>(&'a self, pat: &'a str) -> MatchIndices<'a> { /// ``` #[unstable = "might get removed in the future in favor of a more generic split()"] fn split_str<'a>(&'a self, pat: &'a str) -> SplitStr<'a> { - core_str::StrExt::split_str(self.index(&FullRange), pat) + core_str::StrExt::split_str(&self[], pat) } /// An iterator over the lines of a string (subsequences separated @@ -731,7 +731,7 @@ fn split_str<'a>(&'a self, pat: &'a str) -> SplitStr<'a> { /// ``` #[stable] fn lines(&self) -> Lines { - core_str::StrExt::lines(self.index(&FullRange)) + core_str::StrExt::lines(&self[]) } /// An iterator over the lines of a string, separated by either @@ -747,7 +747,7 @@ fn lines(&self) -> Lines { /// ``` #[stable] fn lines_any(&self) -> LinesAny { - core_str::StrExt::lines_any(self.index(&FullRange)) + core_str::StrExt::lines_any(&self[]) } /// Returns a slice of the given string from the byte range @@ -782,7 +782,7 @@ fn lines_any(&self) -> LinesAny { /// ``` #[unstable = "use slice notation [a..b] instead"] fn slice(&self, begin: uint, end: uint) -> &str { - core_str::StrExt::slice(self.index(&FullRange), begin, end) + core_str::StrExt::slice(&self[], begin, end) } /// Returns a slice of the string from `begin` to its end. @@ -795,7 +795,7 @@ fn slice(&self, begin: uint, end: uint) -> &str { /// See also `slice`, `slice_to` and `slice_chars`. #[unstable = "use slice notation [a..] instead"] fn slice_from(&self, begin: uint) -> &str { - core_str::StrExt::slice_from(self.index(&FullRange), begin) + core_str::StrExt::slice_from(&self[], begin) } /// Returns a slice of the string from the beginning to byte @@ -809,7 +809,7 @@ fn slice_from(&self, begin: uint) -> &str { /// See also `slice`, `slice_from` and `slice_chars`. #[unstable = "use slice notation [0..a] instead"] fn slice_to(&self, end: uint) -> &str { - core_str::StrExt::slice_to(self.index(&FullRange), end) + core_str::StrExt::slice_to(&self[], end) } /// Returns a slice of the string from the character range @@ -837,7 +837,7 @@ fn slice_to(&self, end: uint) -> &str { /// ``` #[unstable = "may have yet to prove its worth"] fn slice_chars(&self, begin: uint, end: uint) -> &str { - core_str::StrExt::slice_chars(self.index(&FullRange), begin, end) + core_str::StrExt::slice_chars(&self[], begin, end) } /// Takes a bytewise (not UTF-8) slice from a string. @@ -848,7 +848,7 @@ fn slice_chars(&self, begin: uint, end: uint) -> &str { /// the entire slice as well. #[stable] unsafe fn slice_unchecked(&self, begin: uint, end: uint) -> &str { - core_str::StrExt::slice_unchecked(self.index(&FullRange), begin, end) + core_str::StrExt::slice_unchecked(&self[], begin, end) } /// Returns true if the pattern `pat` is a prefix of the string. @@ -860,7 +860,7 @@ unsafe fn slice_unchecked(&self, begin: uint, end: uint) -> &str { /// ``` #[stable] fn starts_with(&self, pat: &str) -> bool { - core_str::StrExt::starts_with(self.index(&FullRange), pat) + core_str::StrExt::starts_with(&self[], pat) } /// Returns true if the pattern `pat` is a suffix of the string. 
@@ -872,7 +872,7 @@ fn starts_with(&self, pat: &str) -> bool { /// ``` #[stable] fn ends_with(&self, pat: &str) -> bool { - core_str::StrExt::ends_with(self.index(&FullRange), pat) + core_str::StrExt::ends_with(&self[], pat) } /// Returns a string with all pre- and suffixes that match @@ -892,7 +892,7 @@ fn ends_with(&self, pat: &str) -> bool { /// ``` #[stable] fn trim_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_matches(self.index(&FullRange), pat) + core_str::StrExt::trim_matches(&self[], pat) } /// Returns a string with all prefixes that match @@ -912,7 +912,7 @@ fn trim_matches(&self, pat: P) -> &str { /// ``` #[stable] fn trim_left_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_left_matches(self.index(&FullRange), pat) + core_str::StrExt::trim_left_matches(&self[], pat) } /// Returns a string with all suffixes that match @@ -932,7 +932,7 @@ fn trim_left_matches(&self, pat: P) -> &str { /// ``` #[stable] fn trim_right_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_right_matches(self.index(&FullRange), pat) + core_str::StrExt::trim_right_matches(&self[], pat) } /// Check that `index`-th byte lies at the start and/or end of a @@ -960,7 +960,7 @@ fn trim_right_matches(&self, pat: P) -> &str { /// ``` #[unstable = "naming is uncertain with container conventions"] fn is_char_boundary(&self, index: uint) -> bool { - core_str::StrExt::is_char_boundary(self.index(&FullRange), index) + core_str::StrExt::is_char_boundary(&self[], index) } /// Pluck a character out of a string and return the index of the next @@ -1018,7 +1018,7 @@ fn is_char_boundary(&self, index: uint) -> bool { /// If `i` is not the index of the beginning of a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_range_at(&self, start: uint) -> CharRange { - core_str::StrExt::char_range_at(self.index(&FullRange), start) + core_str::StrExt::char_range_at(&self[], start) } /// Given a byte position and a str, return the previous char and its position. @@ -1033,7 +1033,7 @@ fn char_range_at(&self, start: uint) -> CharRange { /// If `i` is not an index following a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_range_at_reverse(&self, start: uint) -> CharRange { - core_str::StrExt::char_range_at_reverse(self.index(&FullRange), start) + core_str::StrExt::char_range_at_reverse(&self[], start) } /// Plucks the character starting at the `i`th byte of a string. @@ -1053,7 +1053,7 @@ fn char_range_at_reverse(&self, start: uint) -> CharRange { /// If `i` is not the index of the beginning of a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_at(&self, i: uint) -> char { - core_str::StrExt::char_at(self.index(&FullRange), i) + core_str::StrExt::char_at(&self[], i) } /// Plucks the character ending at the `i`th byte of a string. @@ -1064,7 +1064,7 @@ fn char_at(&self, i: uint) -> char { /// If `i` is not an index following a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_at_reverse(&self, i: uint) -> char { - core_str::StrExt::char_at_reverse(self.index(&FullRange), i) + core_str::StrExt::char_at_reverse(&self[], i) } /// Work with the byte buffer of a string as a byte slice. 
@@ -1076,7 +1076,7 @@ fn char_at_reverse(&self, i: uint) -> char { /// ``` #[stable] fn as_bytes(&self) -> &[u8] { - core_str::StrExt::as_bytes(self.index(&FullRange)) + core_str::StrExt::as_bytes(&self[]) } /// Returns the byte index of the first character of `self` that @@ -1104,7 +1104,7 @@ fn as_bytes(&self) -> &[u8] { /// ``` #[stable] fn find(&self, pat: P) -> Option { - core_str::StrExt::find(self.index(&FullRange), pat) + core_str::StrExt::find(&self[], pat) } /// Returns the byte index of the last character of `self` that @@ -1132,7 +1132,7 @@ fn find(&self, pat: P) -> Option { /// ``` #[stable] fn rfind(&self, pat: P) -> Option { - core_str::StrExt::rfind(self.index(&FullRange), pat) + core_str::StrExt::rfind(&self[], pat) } /// Returns the byte index of the first matching substring @@ -1156,7 +1156,7 @@ fn rfind(&self, pat: P) -> Option { /// ``` #[unstable = "might get removed in favor of a more generic find in the future"] fn find_str(&self, needle: &str) -> Option { - core_str::StrExt::find_str(self.index(&FullRange), needle) + core_str::StrExt::find_str(&self[], needle) } /// Retrieves the first character from a string slice and returns @@ -1179,7 +1179,7 @@ fn find_str(&self, needle: &str) -> Option { /// ``` #[unstable = "awaiting conventions about shifting and slices"] fn slice_shift_char(&self) -> Option<(char, &str)> { - core_str::StrExt::slice_shift_char(self.index(&FullRange)) + core_str::StrExt::slice_shift_char(&self[]) } /// Returns the byte offset of an inner slice relative to an enclosing outer slice. @@ -1198,7 +1198,7 @@ fn slice_shift_char(&self) -> Option<(char, &str)> { /// ``` #[unstable = "awaiting convention about comparability of arbitrary slices"] fn subslice_offset(&self, inner: &str) -> uint { - core_str::StrExt::subslice_offset(self.index(&FullRange), inner) + core_str::StrExt::subslice_offset(&self[], inner) } /// Return an unsafe pointer to the strings buffer. @@ -1209,13 +1209,13 @@ fn subslice_offset(&self, inner: &str) -> uint { #[stable] #[inline] fn as_ptr(&self) -> *const u8 { - core_str::StrExt::as_ptr(self.index(&FullRange)) + core_str::StrExt::as_ptr(&self[]) } /// Return an iterator of `u16` over the string encoded as UTF-16. #[unstable = "this functionality may only be provided by libunicode"] fn utf16_units(&self) -> Utf16Units { - Utf16Units { encoder: Utf16Encoder::new(self.index(&FullRange).chars()) } + Utf16Units { encoder: Utf16Encoder::new(self[].chars()) } } /// Return the number of bytes in this string @@ -1229,7 +1229,7 @@ fn utf16_units(&self) -> Utf16Units { #[stable] #[inline] fn len(&self) -> uint { - core_str::StrExt::len(self.index(&FullRange)) + core_str::StrExt::len(&self[]) } /// Returns true if this slice contains no bytes @@ -1242,7 +1242,7 @@ fn len(&self) -> uint { #[inline] #[stable] fn is_empty(&self) -> bool { - core_str::StrExt::is_empty(self.index(&FullRange)) + core_str::StrExt::is_empty(&self[]) } /// Parse this string into the specified type. 
@@ -1256,7 +1256,7 @@ fn is_empty(&self) -> bool { #[inline] #[unstable = "this method was just created"] fn parse(&self) -> Option { - core_str::StrExt::parse(self.index(&FullRange)) + core_str::StrExt::parse(&self[]) } /// Returns an iterator over the @@ -1280,7 +1280,7 @@ fn parse(&self) -> Option { /// ``` #[unstable = "this functionality may only be provided by libunicode"] fn graphemes(&self, is_extended: bool) -> Graphemes { - UnicodeStr::graphemes(self.index(&FullRange), is_extended) + UnicodeStr::graphemes(&self[], is_extended) } /// Returns an iterator over the grapheme clusters of self and their byte offsets. @@ -1295,7 +1295,7 @@ fn graphemes(&self, is_extended: bool) -> Graphemes { /// ``` #[unstable = "this functionality may only be provided by libunicode"] fn grapheme_indices(&self, is_extended: bool) -> GraphemeIndices { - UnicodeStr::grapheme_indices(self.index(&FullRange), is_extended) + UnicodeStr::grapheme_indices(&self[], is_extended) } /// An iterator over the words of a string (subsequences separated @@ -1311,7 +1311,7 @@ fn grapheme_indices(&self, is_extended: bool) -> GraphemeIndices { /// ``` #[stable] fn words(&self) -> Words { - UnicodeStr::words(self.index(&FullRange)) + UnicodeStr::words(&self[]) } /// Returns a string's displayed width in columns, treating control @@ -1325,25 +1325,25 @@ fn words(&self) -> Words { /// `is_cjk` = `false`) if the locale is unknown. #[unstable = "this functionality may only be provided by libunicode"] fn width(&self, is_cjk: bool) -> uint { - UnicodeStr::width(self.index(&FullRange), is_cjk) + UnicodeStr::width(&self[], is_cjk) } /// Returns a string with leading and trailing whitespace removed. #[stable] fn trim(&self) -> &str { - UnicodeStr::trim(self.index(&FullRange)) + UnicodeStr::trim(&self[]) } /// Returns a string with leading whitespace removed. #[stable] fn trim_left(&self) -> &str { - UnicodeStr::trim_left(self.index(&FullRange)) + UnicodeStr::trim_left(&self[]) } /// Returns a string with trailing whitespace removed. #[stable] fn trim_right(&self) -> &str { - UnicodeStr::trim_right(self.index(&FullRange)) + UnicodeStr::trim_right(&self[]) } } diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 507703c3a90..2b0027ef078 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -168,7 +168,7 @@ fn safe_get(xs: &[u8], i: uint, total: uint) -> u8 { if i > 0 { unsafe { - res.as_mut_vec().push_all(v.index(&(0..i))) + res.as_mut_vec().push_all(&v[0..i]) }; } @@ -185,7 +185,7 @@ fn safe_get(xs: &[u8], i: uint, total: uint) -> u8 { macro_rules! error { () => ({ unsafe { if subseqidx != i_ { - res.as_mut_vec().push_all(v.index(&(subseqidx..i_))); + res.as_mut_vec().push_all(&v[subseqidx..i_]); } subseqidx = i; res.as_mut_vec().push_all(REPLACEMENT); @@ -254,7 +254,7 @@ macro_rules! 
error { () => ({ } if subseqidx < total { unsafe { - res.as_mut_vec().push_all(v.index(&(subseqidx..total))) + res.as_mut_vec().push_all(&v[subseqidx..total]) }; } Cow::Owned(res) @@ -841,21 +841,21 @@ impl ops::Index> for String { type Output = str; #[inline] fn index(&self, index: &ops::Range) -> &str { - &self.index(&FullRange)[*index] + &self[][*index] } } impl ops::Index> for String { type Output = str; #[inline] fn index(&self, index: &ops::RangeTo) -> &str { - &self.index(&FullRange)[*index] + &self[][*index] } } impl ops::Index> for String { type Output = str; #[inline] fn index(&self, index: &ops::RangeFrom) -> &str { - &self.index(&FullRange)[*index] + &self[][*index] } } impl ops::Index for String { @@ -871,7 +871,7 @@ impl ops::Deref for String { type Target = str; fn deref<'a>(&'a self) -> &'a str { - unsafe { mem::transmute(self.vec.index(&FullRange)) } + unsafe { mem::transmute(&self.vec[]) } } } diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index 312d739e3a4..04a2d5b5bc9 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -1178,7 +1178,7 @@ fn clone_from(&mut self, other: &Vec) { // self.len <= other.len due to the truncate above, so the // slice here is always in-bounds. - let slice = other.index(&(self.len()..)); + let slice = &other[self.len()..]; self.push_all(slice); } } diff --git a/src/libcore/array.rs b/src/libcore/array.rs index 05db9e11760..0cea0b3d88e 100644 --- a/src/libcore/array.rs +++ b/src/libcore/array.rs @@ -18,7 +18,7 @@ use cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering}; use fmt; use marker::Copy; -use ops::{Deref, FullRange, Index}; +use ops::{Deref, FullRange}; use option::Option; // macro for implementing n-ary tuple functions and operations @@ -35,7 +35,7 @@ macro_rules! 
array_impls { #[unstable = "waiting for Show to stabilize"] impl fmt::Show for [T; $N] { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Show::fmt(&self.index(&FullRange), f) + fmt::Show::fmt(&&self[], f) } } @@ -43,11 +43,11 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { impl PartialEq<[B; $N]> for [A; $N] where A: PartialEq { #[inline] fn eq(&self, other: &[B; $N]) -> bool { - self.index(&FullRange) == other.index(&FullRange) + &self[] == &other[] } #[inline] fn ne(&self, other: &[B; $N]) -> bool { - self.index(&FullRange) != other.index(&FullRange) + &self[] != &other[] } } @@ -58,11 +58,11 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { { #[inline(always)] fn eq(&self, other: &Rhs) -> bool { - PartialEq::eq(self.index(&FullRange), &**other) + PartialEq::eq(&self[], &**other) } #[inline(always)] fn ne(&self, other: &Rhs) -> bool { - PartialEq::ne(self.index(&FullRange), &**other) + PartialEq::ne(&self[], &**other) } } @@ -73,11 +73,11 @@ fn ne(&self, other: &Rhs) -> bool { { #[inline(always)] fn eq(&self, other: &[B; $N]) -> bool { - PartialEq::eq(&**self, other.index(&FullRange)) + PartialEq::eq(&**self, &other[]) } #[inline(always)] fn ne(&self, other: &[B; $N]) -> bool { - PartialEq::ne(&**self, other.index(&FullRange)) + PartialEq::ne(&**self, &other[]) } } @@ -88,23 +88,23 @@ fn ne(&self, other: &Rhs) -> bool { impl PartialOrd for [T; $N] { #[inline] fn partial_cmp(&self, other: &[T; $N]) -> Option { - PartialOrd::partial_cmp(&self.index(&FullRange), &other.index(&FullRange)) + PartialOrd::partial_cmp(&&self[], &&other[]) } #[inline] fn lt(&self, other: &[T; $N]) -> bool { - PartialOrd::lt(&self.index(&FullRange), &other.index(&FullRange)) + PartialOrd::lt(&&self[], &&other[]) } #[inline] fn le(&self, other: &[T; $N]) -> bool { - PartialOrd::le(&self.index(&FullRange), &other.index(&FullRange)) + PartialOrd::le(&&self[], &&other[]) } #[inline] fn ge(&self, other: &[T; $N]) -> bool { - PartialOrd::ge(&self.index(&FullRange), &other.index(&FullRange)) + PartialOrd::ge(&&self[], &&other[]) } #[inline] fn gt(&self, other: &[T; $N]) -> bool { - PartialOrd::gt(&self.index(&FullRange), &other.index(&FullRange)) + PartialOrd::gt(&&self[], &&other[]) } } @@ -112,7 +112,7 @@ fn ne(&self, other: &Rhs) -> bool { impl Ord for [T; $N] { #[inline] fn cmp(&self, other: &[T; $N]) -> Ordering { - Ord::cmp(&self.index(&FullRange), &other.index(&FullRange)) + Ord::cmp(&&self[], &&other[]) } } )+ diff --git a/src/libcore/fmt/float.rs b/src/libcore/fmt/float.rs index d833b8fed77..0ffcb014c28 100644 --- a/src/libcore/fmt/float.rs +++ b/src/libcore/fmt/float.rs @@ -20,7 +20,7 @@ use iter::{IteratorExt, range}; use num::{cast, Float, ToPrimitive}; use num::FpCategory as Fp; -use ops::{FnOnce, Index}; +use ops::FnOnce; use result::Result::Ok; use slice::{self, SliceExt}; use str::{self, StrExt}; @@ -332,5 +332,5 @@ fn write_str(&mut self, s: &str) -> fmt::Result { } } - f(unsafe { str::from_utf8_unchecked(buf.index(&(0..end))) }) + f(unsafe { str::from_utf8_unchecked(&buf[0..end]) }) } diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 54b0f0d8d35..da59173a3d6 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -21,7 +21,7 @@ use option::Option; use option::Option::{Some, None}; use result::Result::Ok; -use ops::{Deref, FnOnce, Index}; +use ops::{Deref, FnOnce}; use result; use slice::SliceExt; use slice; @@ -424,7 +424,7 @@ pub fn pad_integral(&mut self, for c in sign.into_iter() { let mut b = [0; 4]; let n = c.encode_utf8(&mut 
b).unwrap_or(0); - let b = unsafe { str::from_utf8_unchecked(b.index(&(0..n))) }; + let b = unsafe { str::from_utf8_unchecked(&b[0..n]) }; try!(f.buf.write_str(b)); } if prefixed { f.buf.write_str(prefix) } @@ -532,7 +532,7 @@ fn with_padding(&mut self, padding: uint, default: rt::Alignment, f: F) -> Re let mut fill = [0u8; 4]; let len = self.fill.encode_utf8(&mut fill).unwrap_or(0); - let fill = unsafe { str::from_utf8_unchecked(fill.index(&(..len))) }; + let fill = unsafe { str::from_utf8_unchecked(&fill[..len]) }; for _ in range(0, pre_pad) { try!(self.buf.write_str(fill)); @@ -666,7 +666,7 @@ impl String for char { fn fmt(&self, f: &mut Formatter) -> Result { let mut utf8 = [0u8; 4]; let amt = self.encode_utf8(&mut utf8).unwrap_or(0); - let s: &str = unsafe { mem::transmute(utf8.index(&(0..amt))) }; + let s: &str = unsafe { mem::transmute(&utf8[0..amt]) }; String::fmt(s, f) } } diff --git a/src/libcore/fmt/num.rs b/src/libcore/fmt/num.rs index 905001cd567..1df6f845225 100644 --- a/src/libcore/fmt/num.rs +++ b/src/libcore/fmt/num.rs @@ -16,7 +16,6 @@ use fmt; use iter::IteratorExt; -use ops::Index; use num::{Int, cast}; use slice::SliceExt; use str; @@ -62,7 +61,7 @@ fn fmt_int(&self, mut x: T, f: &mut fmt::Formatter) -> fmt::Result { if x == zero { break }; // No more digits left to accumulate. } } - let buf = unsafe { str::from_utf8_unchecked(buf.index(&(curr..))) }; + let buf = unsafe { str::from_utf8_unchecked(&buf[curr..]) }; f.pad_integral(is_positive, self.prefix(), buf) } } diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs index 490d8111f46..a6f9d11eede 100644 --- a/src/libcore/num/mod.rs +++ b/src/libcore/num/mod.rs @@ -24,7 +24,7 @@ use marker::Copy; use mem::size_of; use ops::{Add, Sub, Mul, Div, Rem, Neg}; -use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr, Index}; +use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr}; use option::Option; use option::Option::{Some, None}; use str::{FromStr, StrExt}; @@ -1577,7 +1577,7 @@ fn from_str_radix(src: &str, radix: uint) -> Option<$T> { }; // Parse the exponent as decimal integer - let src = src.index(&(offset..)); + let src = &src[offset..]; let (is_positive, exp) = match src.slice_shift_char() { Some(('-', src)) => (false, src.parse::()), Some(('+', src)) => (true, src.parse::()), diff --git a/src/libcore/slice.rs b/src/libcore/slice.rs index bf2df465370..6c62bfda1fe 100644 --- a/src/libcore/slice.rs +++ b/src/libcore/slice.rs @@ -159,7 +159,7 @@ fn slice_to(&self, end: uint) -> &[T] { #[inline] fn split_at(&self, mid: uint) -> (&[T], &[T]) { - (self.index(&(0..mid)), self.index(&(mid..))) + (&self[0..mid], &self[mid..]) } #[inline] @@ -236,11 +236,11 @@ fn first(&self) -> Option<&T> { } #[inline] - fn tail(&self) -> &[T] { self.index(&(1..)) } + fn tail(&self) -> &[T] { &self[1..] } #[inline] fn init(&self) -> &[T] { - self.index(&(0..(self.len() - 1))) + &self[0..(self.len() - 1)] } #[inline] @@ -443,13 +443,13 @@ fn contains(&self, x: &T) -> bool where T: PartialEq { #[inline] fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq { let n = needle.len(); - self.len() >= n && needle == self.index(&(0..n)) + self.len() >= n && needle == &self[0..n] } #[inline] fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq { let (m, n) = (self.len(), needle.len()); - m >= n && needle == self.index(&((m-n)..)) + m >= n && needle == &self[(m-n)..] 
} #[unstable] @@ -972,8 +972,8 @@ fn next(&mut self) -> Option<&'a [T]> { match self.v.iter().position(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { - let ret = Some(self.v.index(&(0..idx))); - self.v = self.v.index(&((idx + 1)..)); + let ret = Some(&self.v[0..idx]); + self.v = &self.v[(idx + 1)..]; ret } } @@ -998,8 +998,8 @@ fn next_back(&mut self) -> Option<&'a [T]> { match self.v.iter().rposition(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { - let ret = Some(self.v.index(&((idx + 1)..))); - self.v = self.v.index(&(0..idx)); + let ret = Some(&self.v[(idx + 1)..]); + self.v = &self.v[0..idx]; ret } } @@ -1195,8 +1195,8 @@ fn next(&mut self) -> Option<&'a [T]> { if self.size > self.v.len() { None } else { - let ret = Some(self.v.index(&(0..self.size))); - self.v = self.v.index(&(1..)); + let ret = Some(&self.v[0..self.size]); + self.v = &self.v[1..]; ret } } @@ -1283,7 +1283,7 @@ fn idx(&mut self, index: uint) -> Option<&'a [T]> { let mut hi = lo + self.size; if hi < lo || hi > self.v.len() { hi = self.v.len(); } - Some(self.v.index(&(lo..hi))) + Some(&self.v[lo..hi]) } else { None } diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index 3f8ce000e21..6051c68b116 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -26,7 +26,7 @@ use marker::Sized; use mem; use num::Int; -use ops::{Fn, FnMut, Index}; +use ops::{Fn, FnMut}; use option::Option::{self, None, Some}; use ptr::PtrExt; use raw::{Repr, Slice}; @@ -580,7 +580,7 @@ fn new() -> NaiveSearcher { fn next(&mut self, haystack: &[u8], needle: &[u8]) -> Option<(uint, uint)> { while self.position + needle.len() <= haystack.len() { - if haystack.index(&(self.position .. self.position + needle.len())) == needle { + if &haystack[self.position .. self.position + needle.len()] == needle { let match_pos = self.position; self.position += needle.len(); // add 1 for all matches return Some((match_pos, match_pos + needle.len())); @@ -701,10 +701,10 @@ fn new(needle: &[u8]) -> TwoWaySearcher { // // What's going on is we have some critical factorization (u, v) of the // needle, and we want to determine whether u is a suffix of - // v.index(&(0..period)). If it is, we use "Algorithm CP1". Otherwise we use + // &v[0..period]. If it is, we use "Algorithm CP1". Otherwise we use // "Algorithm CP2", which is optimized for when the period of the needle // is large. - if needle.index(&(0..crit_pos)) == needle.index(&(period.. period + crit_pos)) { + if &needle[0..crit_pos] == &needle[period.. period + crit_pos] { TwoWaySearcher { crit_pos: crit_pos, period: period, @@ -1412,13 +1412,13 @@ unsafe fn slice_unchecked(&self, begin: uint, end: uint) -> &str { #[inline] fn starts_with(&self, needle: &str) -> bool { let n = needle.len(); - self.len() >= n && needle.as_bytes() == self.as_bytes().index(&(0..n)) + self.len() >= n && needle.as_bytes() == &self.as_bytes()[0..n] } #[inline] fn ends_with(&self, needle: &str) -> bool { let (m, n) = (self.len(), needle.len()); - m >= n && needle.as_bytes() == self.as_bytes().index(&((m-n)..)) + m >= n && needle.as_bytes() == &self.as_bytes()[(m-n)..] 
} #[inline] diff --git a/src/libcoretest/char.rs b/src/libcoretest/char.rs index f901e800176..7b6b4f84808 100644 --- a/src/libcoretest/char.rs +++ b/src/libcoretest/char.rs @@ -167,7 +167,7 @@ fn test_encode_utf8() { fn check(input: char, expect: &[u8]) { let mut buf = [0u8; 4]; let n = input.encode_utf8(buf.as_mut_slice()).unwrap_or(0); - assert_eq!(buf.index(&(0..n)), expect); + assert_eq!(&buf[0..n], expect); } check('x', &[0x78]); @@ -181,7 +181,7 @@ fn test_encode_utf16() { fn check(input: char, expect: &[u16]) { let mut buf = [0u16; 2]; let n = input.encode_utf16(buf.as_mut_slice()).unwrap_or(0); - assert_eq!(buf.index(&(0..n)), expect); + assert_eq!(&buf[0..n], expect); } check('x', &[0x0078]); diff --git a/src/libcoretest/slice.rs b/src/libcoretest/slice.rs index b714b6a4e41..6fae384763f 100644 --- a/src/libcoretest/slice.rs +++ b/src/libcoretest/slice.rs @@ -57,17 +57,17 @@ macro_rules! test { } { let mut iter = data.iter_mut(); - assert_eq!(iter.index(&FullRange), other_data.index(&FullRange)); + assert_eq!(&iter[], &other_data[]); // mutability: assert!(&mut iter[] == other_data); iter.next(); - assert_eq!(iter.index(&FullRange), other_data.index(&(1..))); + assert_eq!(&iter[], &other_data[1..]); assert!(&mut iter[] == &mut other_data[1..]); iter.next_back(); - assert_eq!(iter.index(&FullRange), other_data.index(&(1..2))); + assert_eq!(&iter[], &other_data[1..2]); assert!(&mut iter[] == &mut other_data[1..2]); let s = iter.into_slice(); diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index 47cc072a636..6e00b7688bc 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -211,12 +211,12 @@ fn must_consume(&mut self, c: char) { self.cur.next(); } Some((_, other)) => { - self.err(format!("expected `{:?}`, found `{:?}`", c, - other).index(&FullRange)); + self.err(&format!("expected `{:?}`, found `{:?}`", c, + other)[]); } None => { - self.err(format!("expected `{:?}` but string was terminated", - c).index(&FullRange)); + self.err(&format!("expected `{:?}` but string was terminated", + c)[]); } } } @@ -239,12 +239,12 @@ fn string(&mut self, start: uint) -> &'a str { // we may not consume the character, so clone the iterator match self.cur.clone().next() { Some((pos, '}')) | Some((pos, '{')) => { - return self.input.index(&(start..pos)); + return &self.input[start..pos]; } Some(..) => { self.cur.next(); } None => { self.cur.next(); - return self.input.index(&(start..self.input.len())); + return &self.input[start..self.input.len()]; } } } @@ -284,7 +284,7 @@ fn format(&mut self) -> FormatSpec<'a> { flags: 0, precision: CountImplied, width: CountImplied, - ty: self.input.index(&(0..0)), + ty: &self.input[0..0], }; if !self.consume(':') { return spec } @@ -393,7 +393,7 @@ fn word(&mut self) -> &'a str { self.cur.next(); pos } - Some(..) | None => { return self.input.index(&(0..0)); } + Some(..) | None => { return &self.input[0..0]; } }; let mut end; loop { @@ -405,7 +405,7 @@ fn word(&mut self) -> &'a str { None => { end = self.input.len(); break } } } - self.input.index(&(start..end)) + &self.input[start..end] } /// Optionally parses an integer at the current position. 
This doesn't deal diff --git a/src/libgetopts/lib.rs b/src/libgetopts/lib.rs index f50e24c6354..32c2e4467fd 100644 --- a/src/libgetopts/lib.rs +++ b/src/libgetopts/lib.rs @@ -280,7 +280,7 @@ pub fn long_to_short(&self) -> Opt { impl Matches { fn opt_vals(&self, nm: &str) -> Vec { - match find_opt(self.opts.index(&FullRange), Name::from_str(nm)) { + match find_opt(&self.opts[], Name::from_str(nm)) { Some(id) => self.vals[id].clone(), None => panic!("No option '{}' defined", nm) } @@ -308,7 +308,7 @@ pub fn opt_count(&self, nm: &str) -> uint { /// Returns true if any of several options were matched. pub fn opts_present(&self, names: &[String]) -> bool { for nm in names.iter() { - match find_opt(self.opts.as_slice(), Name::from_str(nm.index(&FullRange))) { + match find_opt(self.opts.as_slice(), Name::from_str(&nm[])) { Some(id) if !self.vals[id].is_empty() => return true, _ => (), }; @@ -319,7 +319,7 @@ pub fn opts_present(&self, names: &[String]) -> bool { /// Returns the string argument supplied to one of several matching options or `None`. pub fn opts_str(&self, names: &[String]) -> Option { for nm in names.iter() { - match self.opt_val(nm.index(&FullRange)) { + match self.opt_val(&nm[]) { Some(Val(ref s)) => return Some(s.clone()), _ => () } @@ -584,7 +584,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { while i < l { let cur = args[i].clone(); let curlen = cur.len(); - if !is_arg(cur.index(&FullRange)) { + if !is_arg(&cur[]) { free.push(cur); } else if cur == "--" { let mut j = i + 1; @@ -594,7 +594,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { let mut names; let mut i_arg = None; if cur.as_bytes()[1] == b'-' { - let tail = cur.index(&(2..curlen)); + let tail = &cur[2..curlen]; let tail_eq: Vec<&str> = tail.split('=').collect(); if tail_eq.len() <= 1 { names = vec!(Long(tail.to_string())); @@ -630,7 +630,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { }; if arg_follows && range.next < curlen { - i_arg = Some(cur.index(&(range.next..curlen)).to_string()); + i_arg = Some((&cur[range.next..curlen]).to_string()); break; } @@ -658,7 +658,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { v.push(Val((i_arg.clone()) .unwrap())); } else if name_pos < names.len() || i + 1 == l || - is_arg(args[i + 1].index(&FullRange)) { + is_arg(&args[i + 1][]) { let v = &mut vals[optid]; v.push(Given); } else { @@ -721,7 +721,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} 1 => { row.push('-'); - row.push_str(short_name.index(&FullRange)); + row.push_str(&short_name[]); row.push(' '); } _ => panic!("the short name should only be 1 ascii char long"), @@ -732,7 +732,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} _ => { row.push_str("--"); - row.push_str(long_name.index(&FullRange)); + row.push_str(&long_name[]); row.push(' '); } } @@ -740,10 +740,10 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // arg match hasarg { No => {} - Yes => row.push_str(hint.index(&FullRange)), + Yes => row.push_str(&hint[]), Maybe => { row.push('['); - row.push_str(hint.index(&FullRange)); + row.push_str(&hint[]); row.push(']'); } } @@ -756,7 +756,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { row.push(' '); } } else { - row.push_str(desc_sep.index(&FullRange)); + row.push_str(&desc_sep[]); } // Normalize desc to contain words separated by one space character @@ -768,14 +768,14 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // FIXME: #5516 should be graphemes 
not codepoints let mut desc_rows = Vec::new(); - each_split_within(desc_normalized_whitespace.index(&FullRange), 54, |substr| { + each_split_within(&desc_normalized_whitespace[], 54, |substr| { desc_rows.push(substr.to_string()); true }); // FIXME: #5516 should be graphemes not codepoints // wrapped description - row.push_str(desc_rows.connect(desc_sep.index(&FullRange)).index(&FullRange)); + row.push_str(&desc_rows.connect(&desc_sep[])[]); row }); @@ -794,10 +794,10 @@ fn format_option(opt: &OptGroup) -> String { // Use short_name is possible, but fallback to long_name. if opt.short_name.len() > 0 { line.push('-'); - line.push_str(opt.short_name.index(&FullRange)); + line.push_str(&opt.short_name[]); } else { line.push_str("--"); - line.push_str(opt.long_name.index(&FullRange)); + line.push_str(&opt.long_name[]); } if opt.hasarg != No { @@ -805,7 +805,7 @@ fn format_option(opt: &OptGroup) -> String { if opt.hasarg == Maybe { line.push('['); } - line.push_str(opt.hint.index(&FullRange)); + line.push_str(&opt.hint[]); if opt.hasarg == Maybe { line.push(']'); } @@ -824,10 +824,10 @@ fn format_option(opt: &OptGroup) -> String { /// Derive a short one-line usage summary from a set of long options. pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String { let mut line = format!("Usage: {} ", program_name); - line.push_str(opts.iter() - .map(format_option) - .collect::>() - .connect(" ").index(&FullRange)); + line.push_str(&opts.iter() + .map(format_option) + .collect::>() + .connect(" ")[]); line } @@ -890,9 +890,9 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where (B, Cr, UnderLim) => { B } (B, Cr, OverLim) if (i - last_start + 1) > lim => panic!("word starting with {} longer than limit!", - ss.index(&(last_start..(i + 1)))), + &ss[last_start..(i + 1)]), (B, Cr, OverLim) => { - *cont = it(ss.index(&(slice_start..last_end))); + *cont = it(&ss[slice_start..last_end]); slice_start = last_start; B } @@ -902,7 +902,7 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where } (B, Ws, OverLim) => { last_end = i; - *cont = it(ss.index(&(slice_start..last_end))); + *cont = it(&ss[slice_start..last_end]); A } @@ -911,14 +911,14 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where B } (C, Cr, OverLim) => { - *cont = it(ss.index(&(slice_start..last_end))); + *cont = it(&ss[slice_start..last_end]); slice_start = i; last_start = i; last_end = i; B } (C, Ws, OverLim) => { - *cont = it(ss.index(&(slice_start..last_end))); + *cont = it(&ss[slice_start..last_end]); A } (C, Ws, UnderLim) => { diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 83bad70e7b1..cb831f990b5 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -452,7 +452,7 @@ fn escape_str(s: &str) -> String { pub fn escape(&self) -> String { match self { &LabelStr(ref s) => s.escape_default(), - &EscStr(ref s) => LabelText::escape_str(s.index(&FullRange)), + &EscStr(ref s) => LabelText::escape_str(&s[]), } } @@ -481,7 +481,7 @@ pub fn suffix_line(self, suffix: LabelText) -> LabelText<'static> { let mut prefix = self.pre_escaped_content().into_owned(); let suffix = suffix.pre_escaped_content(); prefix.push_str(r"\n\n"); - prefix.push_str(suffix.index(&FullRange)); + prefix.push_str(&suffix[]); EscStr(prefix.into_cow()) } } @@ -675,7 +675,7 @@ fn id_name<'a>(n: &Node) -> Id<'a> { impl<'a> Labeller<'a, Node, &'a Edge> for LabelledGraph { fn graph_id(&'a self) -> Id<'a> { - Id::new(self.name.index(&FullRange)).unwrap() + Id::new(&self.name[]).unwrap() } fn 
node_id(&'a self, n: &Node) -> Id<'a> { id_name(n) diff --git a/src/liblog/lib.rs b/src/liblog/lib.rs index 08b01e956e1..422556318e2 100644 --- a/src/liblog/lib.rs +++ b/src/liblog/lib.rs @@ -287,7 +287,7 @@ pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) { // Test the literal string from args against the current filter, if there // is one. match unsafe { FILTER.as_ref() } { - Some(filter) if !filter.is_match(args.to_string().index(&FullRange)) => return, + Some(filter) if !filter.is_match(&args.to_string()[]) => return, _ => {} } @@ -382,7 +382,7 @@ fn enabled(level: u32, // Search for the longest match, the vector is assumed to be pre-sorted. for directive in iter.rev() { match directive.name { - Some(ref name) if !module.starts_with(name.index(&FullRange)) => {}, + Some(ref name) if !module.starts_with(&name[]) => {}, Some(..) | None => { return level <= directive.level } @@ -397,7 +397,7 @@ fn enabled(level: u32, /// `Once` primitive (and this function is called from that primitive). fn init() { let (mut directives, filter) = match os::getenv("RUST_LOG") { - Some(spec) => directive::parse_logging_spec(spec.index(&FullRange)), + Some(spec) => directive::parse_logging_spec(&spec[]), None => (Vec::new(), None), }; diff --git a/src/librand/lib.rs b/src/librand/lib.rs index 9d33f65cd59..838caab4bbe 100644 --- a/src/librand/lib.rs +++ b/src/librand/lib.rs @@ -270,7 +270,7 @@ fn gen_ascii_chars<'a>(&'a mut self) -> AsciiGenerator<'a, Self> { /// let mut rng = thread_rng(); /// println!("{:?}", rng.choose(&choices)); /// # // uncomment when slicing syntax is stable - /// //assert_eq!(rng.choose(choices.index(&(0..0))), None); + /// //assert_eq!(rng.choose(&choices[0..0]), None); /// ``` fn choose<'a, T>(&mut self, values: &'a [T]) -> Option<&'a T> { if values.is_empty() { diff --git a/src/librbml/io.rs b/src/librbml/io.rs index 5ebec32d733..bdc00d7db97 100644 --- a/src/librbml/io.rs +++ b/src/librbml/io.rs @@ -95,7 +95,7 @@ fn write(&mut self, buf: &[u8]) -> IoResult<()> { // there (left), and what will be appended on the end (right) let cap = self.buf.len() - self.pos; let (left, right) = if cap <= buf.len() { - (buf.index(&(0..cap)), buf.index(&(cap..))) + (&buf[0..cap], &buf[cap..]) } else { let result: (_, &[_]) = (buf, &[]); result diff --git a/src/librbml/lib.rs b/src/librbml/lib.rs index a66d1dd08c1..8989e77184b 100644 --- a/src/librbml/lib.rs +++ b/src/librbml/lib.rs @@ -56,7 +56,7 @@ pub fn get<'a>(&'a self, tag: uint) -> Doc<'a> { } pub fn as_str_slice<'a>(&'a self) -> &'a str { - str::from_utf8(self.data.index(&(self.start..self.end))).unwrap() + str::from_utf8(&self.data[self.start..self.end]).unwrap() } pub fn as_str(&self) -> String { @@ -291,7 +291,7 @@ pub fn tagged_docs(d: Doc, tg: uint, mut it: F) -> bool where pub fn with_doc_data(d: Doc, f: F) -> T where F: FnOnce(&[u8]) -> T, { - f(d.data.index(&(d.start..d.end))) + f(&d.data[d.start..d.end]) } diff --git a/src/libregex/compile.rs b/src/libregex/compile.rs index 5803da1d335..d29a7a425c1 100644 --- a/src/libregex/compile.rs +++ b/src/libregex/compile.rs @@ -105,7 +105,7 @@ pub fn new(ast: parse::Ast) -> (Program, Vec>) { // This is a bit hacky since we have to skip over the initial // 'Save' instruction. 
let mut pre = String::with_capacity(5); - for inst in c.insts.index(&(1..)).iter() { + for inst in c.insts[1..].iter() { match *inst { OneChar(c, FLAG_EMPTY) => pre.push(c), _ => break diff --git a/src/libregex/parse.rs b/src/libregex/parse.rs index dd11d42b8aa..1cc2b271e9c 100644 --- a/src/libregex/parse.rs +++ b/src/libregex/parse.rs @@ -18,7 +18,6 @@ use std::fmt; use std::iter; use std::num; -use std::ops::Index; /// Static data containing Unicode ranges for general categories and scripts. use unicode::regex::{UNICODE_CLASSES, PERLD, PERLS, PERLW}; @@ -285,8 +284,8 @@ fn noteof(&mut self, expected: &str) -> Result<(), Error> { match self.next_char() { true => Ok(()), false => { - self.err(format!("Expected {:?} but got EOF.", - expected).index(&FullRange)) + self.err(&format!("Expected {:?} but got EOF.", + expected)[]) } } } @@ -294,11 +293,11 @@ fn noteof(&mut self, expected: &str) -> Result<(), Error> { fn expect(&mut self, expected: char) -> Result<(), Error> { match self.next_char() { true if self.cur() == expected => Ok(()), - true => self.err(format!("Expected '{:?}' but got '{:?}'.", - expected, self.cur()).index(&FullRange)), + true => self.err(&format!("Expected '{:?}' but got '{:?}'.", + expected, self.cur())[]), false => { - self.err(format!("Expected '{:?}' but got EOF.", - expected).index(&FullRange)) + self.err(&format!("Expected '{:?}' but got EOF.", + expected)[]) } } } @@ -443,15 +442,15 @@ fn parse_class(&mut self) -> Result<(), Error> { match try!(self.parse_escape()) { Literal(c3, _) => c2 = c3, // allow literal escapes below ast => - return self.err(format!("Expected a literal, but got {:?}.", - ast).index(&FullRange)), + return self.err(&format!("Expected a literal, but got {:?}.", + ast)[]), } } if c2 < c { - return self.err(format!("Invalid character class \ - range '{}-{}'", - c, - c2).index(&FullRange)) + return self.err(&format!("Invalid character class \ + range '{}-{}'", + c, + c2)[]) } ranges.push((c, self.cur())) } else { @@ -489,7 +488,7 @@ fn try_parse_ascii(&mut self) -> Option { FLAG_EMPTY }; let name = self.slice(name_start, closer - 1); - match find_class(ASCII_CLASSES, name.index(&FullRange)) { + match find_class(ASCII_CLASSES, &name[]) { None => None, Some(ranges) => { self.chari = closer; @@ -511,21 +510,21 @@ fn parse_counted(&mut self) -> Result<(), Error> { match self.pos('}') { Some(i) => i, None => { - return self.err(format!("No closing brace for counted \ - repetition starting at position \ - {:?}.", - start).index(&FullRange)) + return self.err(&format!("No closing brace for counted \ + repetition starting at position \ + {:?}.", + start)[]) } }; self.chari = closer; let greed = try!(self.get_next_greedy()); - let inner = self.chars.index(&((start+1)..closer)).iter().cloned() + let inner = self.chars[(start+1)..closer].iter().cloned() .collect::(); // Parse the min and max values from the regex. let (mut min, mut max): (uint, Option); if !inner.contains(",") { - min = try!(self.parse_uint(inner.index(&FullRange))); + min = try!(self.parse_uint(&inner[])); max = Some(min); } else { let pieces: Vec<&str> = inner.splitn(1, ',').collect(); @@ -545,21 +544,21 @@ fn parse_counted(&mut self) -> Result<(), Error> { // Do some bounds checking and make sure max >= min. 
if min > MAX_REPEAT { - return self.err(format!( + return self.err(&format!( "{} exceeds maximum allowed repetitions ({})", - min, MAX_REPEAT).index(&FullRange)); + min, MAX_REPEAT)[]); } if max.is_some() { let m = max.unwrap(); if m > MAX_REPEAT { - return self.err(format!( + return self.err(&format!( "{} exceeds maximum allowed repetitions ({})", - m, MAX_REPEAT).index(&FullRange)); + m, MAX_REPEAT)[]); } if m < min { - return self.err(format!( + return self.err(&format!( "Max repetitions ({}) cannot be smaller than min \ - repetitions ({}).", m, min).index(&FullRange)); + repetitions ({}).", m, min)[]); } } @@ -623,7 +622,7 @@ fn parse_escape(&mut self) -> Result { Ok(AstClass(ranges, flags)) } _ => { - self.err(format!("Invalid escape sequence '\\\\{}'", c).index(&FullRange)) + self.err(&format!("Invalid escape sequence '\\\\{}'", c)[]) } } } @@ -641,9 +640,9 @@ fn parse_unicode_name(&mut self) -> Result { let closer = match self.pos('}') { Some(i) => i, - None => return self.err(format!( + None => return self.err(&format!( "Missing '}}' for unclosed '{{' at position {}", - self.chari).index(&FullRange)), + self.chari)[]), }; if closer - self.chari + 1 == 0 { return self.err("No Unicode class name found.") @@ -657,10 +656,10 @@ fn parse_unicode_name(&mut self) -> Result { name = self.slice(self.chari + 1, self.chari + 2); self.chari += 1; } - match find_class(UNICODE_CLASSES, name.index(&FullRange)) { + match find_class(UNICODE_CLASSES, &name[]) { None => { - return self.err(format!("Could not find Unicode class '{}'", - name).index(&FullRange)) + return self.err(&format!("Could not find Unicode class '{}'", + name)[]) } Some(ranges) => { Ok(AstClass(ranges, negated | (self.flags & FLAG_NOCASE))) @@ -683,11 +682,11 @@ fn parse_octal(&mut self) -> Result { } } let s = self.slice(start, end); - match num::from_str_radix::(s.index(&FullRange), 8) { + match num::from_str_radix::(&s[], 8) { Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), None => { - self.err(format!("Could not parse '{:?}' as octal number.", - s).index(&FullRange)) + self.err(&format!("Could not parse '{:?}' as octal number.", + s)[]) } } } @@ -703,14 +702,14 @@ fn parse_hex(&mut self) -> Result { let closer = match self.pos('}') { None => { - return self.err(format!("Missing '}}' for unclosed \ + return self.err(&format!("Missing '}}' for unclosed \ '{{' at position {}", - start).index(&FullRange)) + start)[]) } Some(i) => i, }; self.chari = closer; - self.parse_hex_digits(self.slice(start, closer).index(&FullRange)) + self.parse_hex_digits(&self.slice(start, closer)[]) } // Parses a two-digit hex number. 
@@ -730,7 +729,7 @@ fn parse_hex_digits(&self, s: &str) -> Result { match num::from_str_radix::(s, 16) { Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), None => { - self.err(format!("Could not parse '{}' as hex number.", s).index(&FullRange)) + self.err(&format!("Could not parse '{}' as hex number.", s)[]) } } } @@ -755,8 +754,8 @@ fn parse_named_capture(&mut self) -> Result<(), Error> { "Capture names can only have underscores, letters and digits.") } if self.names.contains(&name) { - return self.err(format!("Duplicate capture group name '{}'.", - name).index(&FullRange)) + return self.err(&format!("Duplicate capture group name '{}'.", + name)[]) } self.names.push(name.clone()); self.chari = closer; @@ -788,9 +787,9 @@ fn parse_group_opts(&mut self) -> Result<(), Error> { 'U' => { flags = flags | FLAG_SWAP_GREED; saw_flag = true}, '-' => { if sign < 0 { - return self.err(format!( + return self.err(&format!( "Cannot negate flags twice in '{}'.", - self.slice(start, self.chari + 1)).index(&FullRange)) + self.slice(start, self.chari + 1))[]) } sign = -1; saw_flag = false; @@ -799,9 +798,9 @@ fn parse_group_opts(&mut self) -> Result<(), Error> { ':' | ')' => { if sign < 0 { if !saw_flag { - return self.err(format!( + return self.err(&format!( "A valid flag does not follow negation in '{}'", - self.slice(start, self.chari + 1)).index(&FullRange)) + self.slice(start, self.chari + 1))[]) } flags = flags ^ flags; } @@ -812,8 +811,8 @@ fn parse_group_opts(&mut self) -> Result<(), Error> { self.flags = flags; return Ok(()) } - _ => return self.err(format!( - "Unrecognized flag '{}'.", self.cur()).index(&FullRange)), + _ => return self.err(&format!( + "Unrecognized flag '{}'.", self.cur())[]), } } } @@ -910,8 +909,8 @@ fn parse_uint(&self, s: &str) -> Result { match s.parse::() { Some(i) => Ok(i), None => { - self.err(format!("Expected an unsigned integer but got '{}'.", - s).index(&FullRange)) + self.err(&format!("Expected an unsigned integer but got '{}'.", + s)[]) } } } @@ -920,8 +919,8 @@ fn char_from_u32(&self, n: u32) -> Result { match char::from_u32(n) { Some(c) => Ok(c), None => { - self.err(format!("Could not decode '{}' to unicode \ - character.", n).index(&FullRange)) + self.err(&format!("Could not decode '{}' to unicode \ + character.", n)[]) } } } @@ -954,7 +953,7 @@ fn cur(&self) -> char { } fn slice(&self, start: uint, end: uint) -> String { - self.chars.index(&(start..end)).iter().cloned().collect() + self.chars[start..end].iter().cloned().collect() } } diff --git a/src/libregex/re.rs b/src/libregex/re.rs index acaf2e9b4cb..16dd32b6be2 100644 --- a/src/libregex/re.rs +++ b/src/libregex/re.rs @@ -238,19 +238,19 @@ pub fn replacen } let (s, e) = cap.pos(0).unwrap(); // captures only reports matches - new.push_str(text.index(&(last_match..s))); - new.push_str(rep.reg_replace(&cap).index(&FullRange)); + new.push_str(&text[last_match..s]); + new.push_str(&rep.reg_replace(&cap)[]); last_match = e; } - new.push_str(text.index(&(last_match..text.len()))); + new.push_str(&text[last_match..text.len()]); return new; } /// Returns the original string of this regex. pub fn as_str<'a>(&'a self) -> &'a str { match *self { - Dynamic(ExDynamic { ref original, .. }) => original.index(&FullRange), - Native(ExNative { ref original, .. }) => original.index(&FullRange), + Dynamic(ExDynamic { ref original, .. }) => &original[], + Native(ExNative { ref original, .. 
}) => &original[], } } @@ -347,13 +347,13 @@ fn next(&mut self) -> Option<&'t str> { if self.last >= text.len() { None } else { - let s = text.index(&(self.last..text.len())); + let s = &text[self.last..text.len()]; self.last = text.len(); Some(s) } } Some((s, e)) => { - let matched = text.index(&(self.last..s)); + let matched = &text[self.last..s]; self.last = e; Some(matched) } @@ -384,7 +384,7 @@ fn next(&mut self) -> Option<&'t str> { } else { self.cur += 1; if self.cur >= self.limit { - Some(text.index(&(self.splits.last..text.len()))) + Some(&text[self.splits.last..text.len()]) } else { self.splits.next() } @@ -517,7 +517,7 @@ pub fn expand(&self, text: &str) -> String { }) }); let re = Regex::new(r"\$\$").unwrap(); - re.replace_all(text.index(&FullRange), NoExpand("$")) + re.replace_all(&text[], NoExpand("$")) } /// Returns the number of captured groups. diff --git a/src/libregex/vm.rs b/src/libregex/vm.rs index 04c430da4d2..9605536a052 100644 --- a/src/libregex/vm.rs +++ b/src/libregex/vm.rs @@ -152,7 +152,7 @@ fn run(&mut self) -> CaptureLocs { // out early. if self.prog.prefix.len() > 0 && clist.size == 0 { let needle = self.prog.prefix.as_bytes(); - let haystack = self.input.as_bytes().index(&(self.ic..)); + let haystack = &self.input.as_bytes()[self.ic..]; match find_prefix(needle, haystack) { None => break, Some(i) => { diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 1af8e2f29eb..41d237dba2e 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -506,7 +506,7 @@ fn check_heap_type<'a, 'tcx>(&self, cx: &Context<'a, 'tcx>, if n_uniq > 0 { let s = ty_to_string(cx.tcx, ty); let m = format!("type uses owned (Box type) pointers: {}", s); - cx.span_lint(BOX_POINTERS, span, m.index(&FullRange)); + cx.span_lint(BOX_POINTERS, span, &m[]); } } } @@ -586,7 +586,7 @@ fn get_lints(&self) -> LintArray { } fn check_item(&mut self, cx: &Context, item: &ast::Item) { - if !attr::contains_name(item.attrs.index(&FullRange), "automatically_derived") { + if !attr::contains_name(&item.attrs[], "automatically_derived") { return } let did = match item.node { @@ -770,11 +770,11 @@ fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { ty::ty_enum(did, _) => { if ast_util::is_local(did) { if let ast_map::NodeItem(it) = cx.tcx.map.get(did.node) { - warned |= check_must_use(cx, it.attrs.index(&FullRange), s.span); + warned |= check_must_use(cx, &it.attrs[], s.span); } } else { csearch::get_item_attrs(&cx.sess().cstore, did, |attrs| { - warned |= check_must_use(cx, attrs.index(&FullRange), s.span); + warned |= check_must_use(cx, &attrs[], s.span); }); } } @@ -796,7 +796,7 @@ fn check_must_use(cx: &Context, attrs: &[ast::Attribute], sp: Span) -> bool { msg.push_str(s.get()); } } - cx.span_lint(UNUSED_MUST_USE, sp, msg.index(&FullRange)); + cx.span_lint(UNUSED_MUST_USE, sp, &msg[]); return true; } } @@ -842,7 +842,7 @@ fn to_camel_case(s: &str) -> String { } else { format!("{} `{}` should have a camel case name such as `{}`", sort, s, c) }; - cx.span_lint(NON_CAMEL_CASE_TYPES, span, m.index(&FullRange)); + cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[]); } } } @@ -981,8 +981,8 @@ fn to_snake_case(str: &str) -> String { if !is_snake_case(ident) { cx.span_lint(NON_SNAKE_CASE, span, - format!("{} `{}` should have a snake case name such as `{}`", - sort, s, to_snake_case(s.get())).index(&FullRange)); + &format!("{} `{}` should have a snake case name such as `{}`", + sort, s, to_snake_case(s.get()))[]); } } } @@ -1066,10 +1066,10 @@ fn check_item(&mut self, 
cx: &Context, it: &ast::Item) { // upper/lowercase) if s.get().chars().any(|c| c.is_lowercase()) { cx.span_lint(NON_UPPER_CASE_GLOBALS, it.span, - format!("static constant `{}` should have an uppercase name \ + &format!("static constant `{}` should have an uppercase name \ such as `{}`", - s.get(), s.get().chars().map(|c| c.to_uppercase()) - .collect::().index(&FullRange)).index(&FullRange)); + s.get(), &s.get().chars().map(|c| c.to_uppercase()) + .collect::()[])[]); } } _ => {} @@ -1083,10 +1083,10 @@ fn check_pat(&mut self, cx: &Context, p: &ast::Pat) { let s = token::get_ident(path1.node); if s.get().chars().any(|c| c.is_lowercase()) { cx.span_lint(NON_UPPER_CASE_GLOBALS, path1.span, - format!("static constant in pattern `{}` should have an uppercase \ + &format!("static constant in pattern `{}` should have an uppercase \ name such as `{}`", - s.get(), s.get().chars().map(|c| c.to_uppercase()) - .collect::().index(&FullRange)).index(&FullRange)); + s.get(), &s.get().chars().map(|c| c.to_uppercase()) + .collect::()[])[]); } } _ => {} @@ -1110,8 +1110,8 @@ fn check_unused_parens_core(&self, cx: &Context, value: &ast::Expr, msg: &str, let necessary = struct_lit_needs_parens && contains_exterior_struct_lit(&**inner); if !necessary { cx.span_lint(UNUSED_PARENS, value.span, - format!("unnecessary parentheses around {}", - msg).index(&FullRange)) + &format!("unnecessary parentheses around {}", + msg)[]) } } @@ -1213,7 +1213,7 @@ fn check_view_item(&mut self, cx: &Context, view_item: &ast::ViewItem) { let m = format!("braces around {} is unnecessary", token::get_ident(*name).get()); cx.span_lint(UNUSED_IMPORT_BRACES, view_item.span, - m.index(&FullRange)); + &m[]); }, _ => () } @@ -1251,8 +1251,8 @@ fn check_pat(&mut self, cx: &Context, pat: &ast::Pat) { if let ast::PatIdent(_, ident, None) = fieldpat.node.pat.node { if ident.node.as_str() == fieldpat.node.ident.as_str() { cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, - format!("the `{}:` in this pattern is redundant and can \ - be removed", ident.node.as_str()).index(&FullRange)) + &format!("the `{}:` in this pattern is redundant and can \ + be removed", ident.node.as_str())[]) } } } @@ -1356,7 +1356,7 @@ fn get_lints(&self) -> LintArray { fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { if let ast::ExprMatch(_, ref arms, _) = e.node { for a in arms.iter() { - self.check_unused_mut_pat(cx, a.pats.index(&FullRange)) + self.check_unused_mut_pat(cx, &a.pats[]) } } } @@ -1477,7 +1477,7 @@ fn check_missing_docs_attrs(&self, }); if !has_doc { cx.span_lint(MISSING_DOCS, sp, - format!("missing documentation for {}", desc).index(&FullRange)); + &format!("missing documentation for {}", desc)[]); } } } @@ -1491,7 +1491,7 @@ fn enter_lint_attrs(&mut self, _: &Context, attrs: &[ast::Attribute]) { let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| { attr.check_name("doc") && match attr.meta_item_list() { None => false, - Some(l) => attr::contains_name(l.index(&FullRange), "hidden"), + Some(l) => attr::contains_name(&l[], "hidden"), } }); self.doc_hidden_stack.push(doc_hidden); @@ -1513,7 +1513,7 @@ fn check_struct_def_post(&mut self, _: &Context, } fn check_crate(&mut self, cx: &Context, krate: &ast::Crate) { - self.check_missing_docs_attrs(cx, None, krate.attrs.index(&FullRange), + self.check_missing_docs_attrs(cx, None, &krate.attrs[], krate.span, "crate"); } @@ -1527,7 +1527,7 @@ fn check_item(&mut self, cx: &Context, it: &ast::Item) { ast::ItemTy(..) 
=> "a type alias", _ => return }; - self.check_missing_docs_attrs(cx, Some(it.id), it.attrs.index(&FullRange), + self.check_missing_docs_attrs(cx, Some(it.id), &it.attrs[], it.span, desc); } @@ -1540,13 +1540,13 @@ fn check_fn(&mut self, cx: &Context, // Otherwise, doc according to privacy. This will also check // doc for default methods defined on traits. - self.check_missing_docs_attrs(cx, Some(m.id), m.attrs.index(&FullRange), + self.check_missing_docs_attrs(cx, Some(m.id), &m.attrs[], m.span, "a method"); } } fn check_ty_method(&mut self, cx: &Context, tm: &ast::TypeMethod) { - self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs.index(&FullRange), + self.check_missing_docs_attrs(cx, Some(tm.id), &tm.attrs[], tm.span, "a type method"); } @@ -1556,14 +1556,14 @@ fn check_struct_field(&mut self, cx: &Context, sf: &ast::StructField) { let cur_struct_def = *self.struct_def_stack.last() .expect("empty struct_def_stack"); self.check_missing_docs_attrs(cx, Some(cur_struct_def), - sf.node.attrs.index(&FullRange), sf.span, + &sf.node.attrs[], sf.span, "a struct field") } } } fn check_variant(&mut self, cx: &Context, v: &ast::Variant, _: &ast::Generics) { - self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs.index(&FullRange), + self.check_missing_docs_attrs(cx, Some(v.node.id), &v.node.attrs[], v.span, "a variant"); assert!(!self.in_variant); self.in_variant = true; @@ -1674,7 +1674,7 @@ fn lint(&self, cx: &Context, id: ast::DefId, span: Span) { _ => format!("use of {} item", label) }; - cx.span_lint(lint, span, msg.index(&FullRange)); + cx.span_lint(lint, span, &msg[]); } fn is_internal(&self, cx: &Context, span: Span) -> bool { diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 51998bdbcf2..204120be111 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -104,7 +104,7 @@ pub fn new() -> LintStore { } pub fn get_lints<'t>(&'t self) -> &'t [(&'static Lint, bool)] { - self.lints.index(&FullRange) + &self.lints[] } pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec, bool)> { @@ -124,11 +124,11 @@ pub fn register_pass(&mut self, sess: Option<&Session>, match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(msg.index(&FullRange)), - (Some(sess), false) => sess.bug(msg.index(&FullRange)), + (None, _) => early_error(&msg[]), + (Some(sess), false) => sess.bug(&msg[]), // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(msg.index(&FullRange)), + (Some(sess), true) => sess.err(&msg[]), } } @@ -149,11 +149,11 @@ pub fn register_group(&mut self, sess: Option<&Session>, match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(msg.index(&FullRange)), - (Some(sess), false) => sess.bug(msg.index(&FullRange)), + (None, _) => early_error(&msg[]), + (Some(sess), false) => sess.bug(&msg[]), // A duplicate name from a plugin is a user error. 
- (Some(sess), true) => sess.err(msg.index(&FullRange)), + (Some(sess), true) => sess.err(&msg[]), } } } @@ -267,8 +267,8 @@ fn find_lint(&self, lint_name: &str, sess: &Session, span: Option) let warning = format!("lint {} has been renamed to {}", lint_name, new_name); match span { - Some(span) => sess.span_warn(span, warning.index(&FullRange)), - None => sess.warn(warning.index(&FullRange)), + Some(span) => sess.span_warn(span, &warning[]), + None => sess.warn(&warning[]), }; Some(lint_id) } @@ -278,21 +278,21 @@ fn find_lint(&self, lint_name: &str, sess: &Session, span: Option) pub fn process_command_line(&mut self, sess: &Session) { for &(ref lint_name, level) in sess.opts.lint_opts.iter() { - match self.find_lint(lint_name.index(&FullRange), sess, None) { + match self.find_lint(&lint_name[], sess, None) { Some(lint_id) => self.set_level(lint_id, (level, CommandLine)), None => { match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone())) .collect::>>() - .get(lint_name.index(&FullRange)) { + .get(&lint_name[]) { Some(v) => { v.iter() .map(|lint_id: &LintId| self.set_level(*lint_id, (level, CommandLine))) .collect::>(); } - None => sess.err(format!("unknown {} flag: {}", - level.as_str(), lint_name).index(&FullRange)), + None => sess.err(&format!("unknown {} flag: {}", + level.as_str(), lint_name)[]), } } } @@ -403,10 +403,10 @@ pub fn raw_emit_lint(sess: &Session, lint: &'static Lint, if level == Forbid { level = Deny; } match (level, span) { - (Warn, Some(sp)) => sess.span_warn(sp, msg.index(&FullRange)), - (Warn, None) => sess.warn(msg.index(&FullRange)), - (Deny, Some(sp)) => sess.span_err(sp, msg.index(&FullRange)), - (Deny, None) => sess.err(msg.index(&FullRange)), + (Warn, Some(sp)) => sess.span_warn(sp, &msg[]), + (Warn, None) => sess.warn(&msg[]), + (Deny, Some(sp)) => sess.span_err(sp, &msg[]), + (Deny, None) => sess.err(&msg[]), _ => sess.bug("impossible level in raw_emit_lint"), } @@ -513,9 +513,9 @@ fn with_lint_attrs(&mut self, if now == Forbid && level != Forbid { let lint_name = lint_id.as_str(); self.tcx.sess.span_err(span, - format!("{}({}) overruled by outer forbid({})", + &format!("{}({}) overruled by outer forbid({})", level.as_str(), lint_name, - lint_name).index(&FullRange)); + lint_name)[]); } else if now != level { let src = self.lints.get_level_source(lint_id).1; self.level_stack.push((lint_id, (now, src))); @@ -550,7 +550,7 @@ fn visit_ids(&mut self, f: F) where impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { fn visit_item(&mut self, it: &ast::Item) { - self.with_lint_attrs(it.attrs.index(&FullRange), |cx| { + self.with_lint_attrs(&it.attrs[], |cx| { run_lints!(cx, check_item, it); cx.visit_ids(|v| v.visit_item(it)); visit::walk_item(cx, it); @@ -558,14 +558,14 @@ fn visit_item(&mut self, it: &ast::Item) { } fn visit_foreign_item(&mut self, it: &ast::ForeignItem) { - self.with_lint_attrs(it.attrs.index(&FullRange), |cx| { + self.with_lint_attrs(&it.attrs[], |cx| { run_lints!(cx, check_foreign_item, it); visit::walk_foreign_item(cx, it); }) } fn visit_view_item(&mut self, i: &ast::ViewItem) { - self.with_lint_attrs(i.attrs.index(&FullRange), |cx| { + self.with_lint_attrs(&i.attrs[], |cx| { run_lints!(cx, check_view_item, i); cx.visit_ids(|v| v.visit_view_item(i)); visit::walk_view_item(cx, i); @@ -591,7 +591,7 @@ fn visit_fn(&mut self, fk: FnKind<'v>, decl: &'v ast::FnDecl, body: &'v ast::Block, span: Span, id: ast::NodeId) { match fk { visit::FkMethod(_, _, m) => { - self.with_lint_attrs(m.attrs.index(&FullRange), |cx| { + 
self.with_lint_attrs(&m.attrs[], |cx| { run_lints!(cx, check_fn, fk, decl, body, span, id); cx.visit_ids(|v| { v.visit_fn(fk, decl, body, span, id); @@ -607,7 +607,7 @@ fn visit_fn(&mut self, fk: FnKind<'v>, decl: &'v ast::FnDecl, } fn visit_ty_method(&mut self, t: &ast::TypeMethod) { - self.with_lint_attrs(t.attrs.index(&FullRange), |cx| { + self.with_lint_attrs(&t.attrs[], |cx| { run_lints!(cx, check_ty_method, t); visit::walk_ty_method(cx, t); }) @@ -624,14 +624,14 @@ fn visit_struct_def(&mut self, } fn visit_struct_field(&mut self, s: &ast::StructField) { - self.with_lint_attrs(s.node.attrs.index(&FullRange), |cx| { + self.with_lint_attrs(&s.node.attrs[], |cx| { run_lints!(cx, check_struct_field, s); visit::walk_struct_field(cx, s); }) } fn visit_variant(&mut self, v: &ast::Variant, g: &ast::Generics) { - self.with_lint_attrs(v.node.attrs.index(&FullRange), |cx| { + self.with_lint_attrs(&v.node.attrs[], |cx| { run_lints!(cx, check_variant, v, g); visit::walk_variant(cx, v, g); run_lints!(cx, check_variant_post, v, g); @@ -725,7 +725,7 @@ fn visit_id(&mut self, id: ast::NodeId) { None => {} Some(lints) => { for (lint_id, span, msg) in lints.into_iter() { - self.span_lint(lint_id.lint, span, msg.index(&FullRange)) + self.span_lint(lint_id.lint, span, &msg[]) } } } @@ -771,7 +771,7 @@ pub fn check_crate(tcx: &ty::ctxt, let mut cx = Context::new(tcx, krate, exported_items); // Visit the whole crate. - cx.with_lint_attrs(krate.attrs.index(&FullRange), |cx| { + cx.with_lint_attrs(&krate.attrs[], |cx| { cx.visit_id(ast::CRATE_NODE_ID); cx.visit_ids(|v| { v.visited_outermost = true; diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index 66967a73546..310874c311b 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -65,7 +65,7 @@ fn dump_crates(cstore: &CStore) { } fn should_link(i: &ast::ViewItem) -> bool { - !attr::contains_name(i.attrs.index(&FullRange), "no_link") + !attr::contains_name(&i.attrs[], "no_link") } @@ -90,7 +90,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option) { for c in s.chars() { if c.is_alphanumeric() { continue } if c == '_' || c == '-' { continue } - err(format!("invalid character `{}` in crate name: `{}`", c, s).index(&FullRange)); + err(&format!("invalid character `{}` in crate name: `{}`", c, s)[]); } match sess { Some(sess) => sess.abort_if_errors(), @@ -189,8 +189,8 @@ fn process_view_item(&mut self, i: &ast::ViewItem) { match self.extract_crate_info(i) { Some(info) => { let (cnum, _, _) = self.resolve_crate(&None, - info.ident.index(&FullRange), - info.name.index(&FullRange), + &info.ident[], + &info.name[], None, i.span, PathKind::Crate); @@ -209,7 +209,7 @@ fn extract_crate_info(&self, i: &ast::ViewItem) -> Option { let name = match *path_opt { Some((ref path_str, _)) => { let name = path_str.get().to_string(); - validate_crate_name(Some(self.sess), name.index(&FullRange), + validate_crate_name(Some(self.sess), &name[], Some(i.span)); name } @@ -275,8 +275,8 @@ fn process_item(&self, i: &ast::Item) { cstore::NativeUnknown } else { self.sess.span_err(m.span, - format!("unknown kind: `{}`", - k).index(&FullRange)); + &format!("unknown kind: `{}`", + k)[]); cstore::NativeUnknown } } @@ -330,7 +330,7 @@ fn existing_match(&self, name: &str, match self.sess.opts.externs.get(name) { Some(locs) => { let found = locs.iter().any(|l| { - let l = fs::realpath(&Path::new(l.index(&FullRange))).ok(); + let l = fs::realpath(&Path::new(&l[])).ok(); l == source.dylib || l == source.rlib }); 
if found { @@ -409,7 +409,7 @@ fn resolve_crate(&mut self, crate_name: name, hash: hash.map(|a| &*a), filesearch: self.sess.target_filesearch(kind), - triple: self.sess.opts.target_triple.index(&FullRange), + triple: &self.sess.opts.target_triple[], root: root, rejected_via_hash: vec!(), rejected_via_triple: vec!(), @@ -435,8 +435,8 @@ fn resolve_crate_deps(&mut self, decoder::get_crate_deps(cdata).iter().map(|dep| { debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash); let (local_cnum, _, _) = self.resolve_crate(root, - dep.name.index(&FullRange), - dep.name.index(&FullRange), + &dep.name[], + &dep.name[], Some(&dep.hash), span, PathKind::Dependency); @@ -447,7 +447,7 @@ fn resolve_crate_deps(&mut self, pub fn read_plugin_metadata<'b>(&'b mut self, vi: &'b ast::ViewItem) -> PluginMetadata<'b> { let info = self.extract_crate_info(vi).unwrap(); - let target_triple = self.sess.opts.target_triple.index(&FullRange); + let target_triple = &self.sess.opts.target_triple[]; let is_cross = target_triple != config::host_triple(); let mut should_link = info.should_link && !is_cross; let mut target_only = false; @@ -456,8 +456,8 @@ pub fn read_plugin_metadata<'b>(&'b mut self, let mut load_ctxt = loader::Context { sess: self.sess, span: vi.span, - ident: ident.index(&FullRange), - crate_name: name.index(&FullRange), + ident: &ident[], + crate_name: &name[], hash: None, filesearch: self.sess.host_filesearch(PathKind::Crate), triple: config::host_triple(), @@ -485,8 +485,8 @@ pub fn read_plugin_metadata<'b>(&'b mut self, let register = should_link && self.existing_match(info.name.as_slice(), None).is_none(); let metadata = if register { // Register crate now to avoid double-reading metadata - let (_, cmd, _) = self.register_crate(&None, info.ident.index(&FullRange), - info.name.index(&FullRange), vi.span, library); + let (_, cmd, _) = self.register_crate(&None, &info.ident[], + &info.name[], vi.span, library); PMDSource::Registered(cmd) } else { // Not registering the crate; just hold on to the metadata @@ -507,8 +507,8 @@ pub fn read_plugin_metadata<'b>(&'b mut self, impl<'a> PluginMetadata<'a> { /// Read exported macros pub fn exported_macros(&self) -> Vec { - let imported_from = Some(token::intern(self.info.ident.index(&FullRange)).ident()); - let source_name = format!("<{} macros>", self.info.ident.index(&FullRange)); + let imported_from = Some(token::intern(&self.info.ident[]).ident()); + let source_name = format!("<{} macros>", &self.info.ident[]); let mut macros = vec![]; decoder::each_exported_macro(self.metadata.as_slice(), &*self.sess.cstore.intr, @@ -550,7 +550,7 @@ pub fn plugin_registrar(&self) -> Option<(Path, String)> { self.info.ident, config::host_triple(), self.sess.opts.target_triple); - self.sess.span_err(self.vi_span, message.index(&FullRange)); + self.sess.span_err(self.vi_span, &message[]); self.sess.abort_if_errors(); } @@ -563,7 +563,7 @@ pub fn plugin_registrar(&self) -> Option<(Path, String)> { let message = format!("plugin crate `{}` only found in rlib format, \ but must be available in dylib format", self.info.ident); - self.sess.span_err(self.vi_span, message.index(&FullRange)); + self.sess.span_err(self.vi_span, &message[]); // No need to abort because the loading code will just ignore this // empty dylib. 
None diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs index 72ce61b133a..b83532bd3f9 100644 --- a/src/librustc/metadata/csearch.rs +++ b/src/librustc/metadata/csearch.rs @@ -95,7 +95,7 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec // FIXME #1920: This path is not always correct if the crate is not linked // into the root namespace. - let mut r = vec![ast_map::PathMod(token::intern(cdata.name.index(&FullRange)))]; + let mut r = vec![ast_map::PathMod(token::intern(&cdata.name[]))]; r.push_all(path.as_slice()); r } diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index 9e71c867efa..a1f144fa1f4 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -75,7 +75,7 @@ fn lookup_hash<'a, F>(d: rbml::Doc<'a>, mut eq_fn: F, hash: u64) -> Option(item_id: ast::NodeId, items: rbml::Doc<'a>) -> Option> { fn eq_item(bytes: &[u8], item_id: ast::NodeId) -> bool { return u64_from_be_bytes( - bytes.index(&(0u..4u)), 0u, 4u) as ast::NodeId + &bytes[0u..4u], 0u, 4u) as ast::NodeId == item_id; } lookup_hash(items, @@ -1191,7 +1191,7 @@ fn docstr(doc: rbml::Doc, tag_: uint) -> String { } reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| { let name = docstr(depdoc, tag_crate_dep_crate_name); - let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).index(&FullRange)); + let hash = Svh::new(&docstr(depdoc, tag_crate_dep_hash)[]); deps.push(CrateDep { cnum: crate_num, name: name, diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index 83038df338b..1579d6f4442 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -95,7 +95,7 @@ fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Ident) { } pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) { - rbml_w.wr_tagged_str(tag_def_id, def_to_string(id).index(&FullRange)); + rbml_w.wr_tagged_str(tag_def_id, &def_to_string(id)[]); } #[derive(Clone)] @@ -154,7 +154,7 @@ fn encode_variant_id(rbml_w: &mut Encoder, vid: DefId) { rbml_w.end_tag(); rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(s.index(&FullRange)); + rbml_w.wr_str(&s[]); rbml_w.end_tag(); } @@ -264,7 +264,7 @@ fn encode_symbol(ecx: &EncodeContext, } None => { ecx.diag.handler().bug( - format!("encode_symbol: id not found {}", id).index(&FullRange)); + &format!("encode_symbol: id not found {}", id)[]); } } rbml_w.end_tag(); @@ -332,8 +332,8 @@ fn encode_enum_variant_info(ecx: &EncodeContext, encode_name(rbml_w, variant.node.name.name); encode_parent_item(rbml_w, local_def(id)); encode_visibility(rbml_w, variant.node.vis); - encode_attributes(rbml_w, variant.node.attrs.index(&FullRange)); - encode_repr_attrs(rbml_w, ecx, variant.node.attrs.index(&FullRange)); + encode_attributes(rbml_w, &variant.node.attrs[]); + encode_repr_attrs(rbml_w, ecx, &variant.node.attrs[]); let stab = stability::lookup(ecx.tcx, ast_util::local_def(variant.node.id)); encode_stability(rbml_w, stab); @@ -344,9 +344,9 @@ fn encode_enum_variant_info(ecx: &EncodeContext, let fields = ty::lookup_struct_fields(ecx.tcx, def_id); let idx = encode_info_for_struct(ecx, rbml_w, - fields.index(&FullRange), + &fields[], index); - encode_struct_fields(rbml_w, fields.index(&FullRange), def_id); + encode_struct_fields(rbml_w, &fields[], def_id); encode_index(rbml_w, idx, write_i64); } } @@ -386,12 +386,12 @@ fn encode_reexported_static_method(rbml_w: &mut Encoder, exp.name, token::get_name(method_name)); rbml_w.start_tag(tag_items_data_item_reexport); 
rbml_w.start_tag(tag_items_data_item_reexport_def_id); - rbml_w.wr_str(def_to_string(method_def_id).index(&FullRange)); + rbml_w.wr_str(&def_to_string(method_def_id)[]); rbml_w.end_tag(); rbml_w.start_tag(tag_items_data_item_reexport_name); - rbml_w.wr_str(format!("{}::{}", + rbml_w.wr_str(&format!("{}::{}", exp.name, - token::get_name(method_name)).index(&FullRange)); + token::get_name(method_name))[]); rbml_w.end_tag(); rbml_w.end_tag(); } @@ -529,7 +529,7 @@ fn encode_reexports(ecx: &EncodeContext, id); rbml_w.start_tag(tag_items_data_item_reexport); rbml_w.start_tag(tag_items_data_item_reexport_def_id); - rbml_w.wr_str(def_to_string(exp.def_id).index(&FullRange)); + rbml_w.wr_str(&def_to_string(exp.def_id)[]); rbml_w.end_tag(); rbml_w.start_tag(tag_items_data_item_reexport_name); rbml_w.wr_str(exp.name.as_str()); @@ -562,13 +562,13 @@ fn encode_info_for_mod(ecx: &EncodeContext, // Encode info about all the module children. for item in md.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(item.id)).index(&FullRange)); + rbml_w.wr_str(&def_to_string(local_def(item.id))[]); rbml_w.end_tag(); each_auxiliary_node_id(&**item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def( - auxiliary_node_id)).index(&FullRange)); + rbml_w.wr_str(&def_to_string(local_def( + auxiliary_node_id))[]); rbml_w.end_tag(); true }); @@ -580,7 +580,7 @@ fn encode_info_for_mod(ecx: &EncodeContext, did, ecx.tcx.map.node_to_string(did)); rbml_w.start_tag(tag_mod_impl); - rbml_w.wr_str(def_to_string(local_def(did)).index(&FullRange)); + rbml_w.wr_str(&def_to_string(local_def(did))[]); rbml_w.end_tag(); } } @@ -615,7 +615,7 @@ fn encode_visibility(rbml_w: &mut Encoder, visibility: ast::Visibility) { ast::Public => 'y', ast::Inherited => 'i', }; - rbml_w.wr_str(ch.to_string().index(&FullRange)); + rbml_w.wr_str(&ch.to_string()[]); rbml_w.end_tag(); } @@ -627,7 +627,7 @@ fn encode_unboxed_closure_kind(rbml_w: &mut Encoder, ty::FnMutUnboxedClosureKind => 'm', ty::FnOnceUnboxedClosureKind => 'o', }; - rbml_w.wr_str(ch.to_string().index(&FullRange)); + rbml_w.wr_str(&ch.to_string()[]); rbml_w.end_tag(); } @@ -788,7 +788,7 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder, rbml_w.end_tag(); rbml_w.wr_tagged_str(tag_region_param_def_def_id, - def_to_string(param.def_id).index(&FullRange)); + &def_to_string(param.def_id)[]); rbml_w.wr_tagged_u64(tag_region_param_def_space, param.space.to_uint() as u64); @@ -864,9 +864,9 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_path(rbml_w, impl_path.chain(Some(elem).into_iter())); match ast_item_opt { Some(&ast::MethodImplItem(ref ast_method)) => { - encode_attributes(rbml_w, ast_method.attrs.index(&FullRange)); + encode_attributes(rbml_w, &ast_method.attrs[]); let any_types = !pty.generics.types.is_empty(); - if any_types || is_default_impl || should_inline(ast_method.attrs.index(&FullRange)) { + if any_types || is_default_impl || should_inline(&ast_method.attrs[]) { encode_inlined_item(ecx, rbml_w, IIImplItemRef(local_def(parent_id), ast_item_opt.unwrap())); } @@ -912,7 +912,7 @@ fn encode_info_for_associated_type(ecx: &EncodeContext, match typedef_opt { None => {} Some(typedef) => { - encode_attributes(rbml_w, typedef.attrs.index(&FullRange)); + encode_attributes(rbml_w, &typedef.attrs[]); encode_type(ecx, rbml_w, ty::node_id_to_type(ecx.tcx, typedef.id)); } @@ -1046,7 +1046,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, encode_path(rbml_w, path); 
encode_visibility(rbml_w, vis); encode_stability(rbml_w, stab); - encode_attributes(rbml_w, item.attrs.index(&FullRange)); + encode_attributes(rbml_w, &item.attrs[]); rbml_w.end_tag(); } ast::ItemConst(_, _) => { @@ -1072,8 +1072,8 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); encode_path(rbml_w, path); - encode_attributes(rbml_w, item.attrs.index(&FullRange)); - if tps_len > 0u || should_inline(item.attrs.index(&FullRange)) { + encode_attributes(rbml_w, &item.attrs[]); + if tps_len > 0u || should_inline(&item.attrs[]) { encode_inlined_item(ecx, rbml_w, IIItemRef(item)); } if tps_len == 0 { @@ -1089,7 +1089,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, encode_info_for_mod(ecx, rbml_w, m, - item.attrs.index(&FullRange), + &item.attrs[], item.id, path, item.ident, @@ -1106,7 +1106,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, // Encode all the items in this module. for foreign_item in fm.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(foreign_item.id)).index(&FullRange)); + rbml_w.wr_str(&def_to_string(local_def(foreign_item.id))[]); rbml_w.end_tag(); } encode_visibility(rbml_w, vis); @@ -1134,8 +1134,8 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, encode_item_variances(rbml_w, ecx, item.id); encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs.index(&FullRange)); - encode_repr_attrs(rbml_w, ecx, item.attrs.index(&FullRange)); + encode_attributes(rbml_w, &item.attrs[]); + encode_repr_attrs(rbml_w, ecx, &item.attrs[]); for v in (*enum_definition).variants.iter() { encode_variant_id(rbml_w, local_def(v.node.id)); } @@ -1152,7 +1152,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, encode_enum_variant_info(ecx, rbml_w, item.id, - (*enum_definition).variants.index(&FullRange), + &(*enum_definition).variants[], index); } ast::ItemStruct(ref struct_def, _) => { @@ -1164,7 +1164,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, class itself */ let idx = encode_info_for_struct(ecx, rbml_w, - fields.index(&FullRange), + &fields[], index); /* Index the class*/ @@ -1178,16 +1178,16 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, encode_item_variances(rbml_w, ecx, item.id); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs.index(&FullRange)); + encode_attributes(rbml_w, &item.attrs[]); encode_path(rbml_w, path.clone()); encode_stability(rbml_w, stab); encode_visibility(rbml_w, vis); - encode_repr_attrs(rbml_w, ecx, item.attrs.index(&FullRange)); + encode_repr_attrs(rbml_w, ecx, &item.attrs[]); /* Encode def_ids for each field and method for methods, write all the stuff get_trait_method needs to know*/ - encode_struct_fields(rbml_w, fields.index(&FullRange), def_id); + encode_struct_fields(rbml_w, &fields[], def_id); encode_inlined_item(ecx, rbml_w, IIItemRef(item)); @@ -1219,7 +1219,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, encode_family(rbml_w, 'i'); encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs.index(&FullRange)); + encode_attributes(rbml_w, &item.attrs[]); encode_unsafety(rbml_w, unsafety); encode_polarity(rbml_w, polarity); match ty.node { @@ -1323,7 +1323,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, encode_generics(rbml_w, ecx, &trait_def.generics, 
tag_item_generics); encode_trait_ref(rbml_w, ecx, &*trait_def.trait_ref, tag_item_trait_ref); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs.index(&FullRange)); + encode_attributes(rbml_w, &item.attrs[]); encode_visibility(rbml_w, vis); encode_stability(rbml_w, stab); for &method_def_id in ty::trait_item_def_ids(tcx, def_id).iter() { @@ -1341,7 +1341,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, rbml_w.end_tag(); rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(method_def_id.def_id()).index(&FullRange)); + rbml_w.wr_str(&def_to_string(method_def_id.def_id())[]); rbml_w.end_tag(); } encode_path(rbml_w, path.clone()); @@ -1433,14 +1433,14 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, }; match trait_item { &ast::RequiredMethod(ref m) => { - encode_attributes(rbml_w, m.attrs.index(&FullRange)); + encode_attributes(rbml_w, &m.attrs[]); encode_trait_item(rbml_w); encode_item_sort(rbml_w, 'r'); encode_method_argument_names(rbml_w, &*m.decl); } &ast::ProvidedMethod(ref m) => { - encode_attributes(rbml_w, m.attrs.index(&FullRange)); + encode_attributes(rbml_w, &m.attrs[]); encode_trait_item(rbml_w); encode_item_sort(rbml_w, 'p'); encode_inlined_item(ecx, rbml_w, IITraitItemRef(def_id, trait_item)); @@ -1449,7 +1449,7 @@ fn add_to_index(item: &ast::Item, rbml_w: &Encoder, &ast::TypeTraitItem(ref associated_type) => { encode_attributes(rbml_w, - associated_type.attrs.index(&FullRange)); + &associated_type.attrs[]); encode_item_sort(rbml_w, 't'); } } @@ -1826,10 +1826,10 @@ fn encode_macro_defs(rbml_w: &mut Encoder, rbml_w.start_tag(tag_macro_def); encode_name(rbml_w, def.ident.name); - encode_attributes(rbml_w, def.attrs.index(&FullRange)); + encode_attributes(rbml_w, &def.attrs[]); rbml_w.start_tag(tag_macro_def_body); - rbml_w.wr_str(pprust::tts_to_string(def.body.index(&FullRange)).index(&FullRange)); + rbml_w.wr_str(&pprust::tts_to_string(&def.body[])[]); rbml_w.end_tag(); rbml_w.end_tag(); @@ -1869,7 +1869,7 @@ impl<'a, 'b, 'v> Visitor<'v> for StructFieldVisitor<'a, 'b> { fn visit_struct_field(&mut self, field: &ast::StructField) { self.rbml_w.start_tag(tag_struct_field); self.rbml_w.wr_tagged_u32(tag_struct_field_id, field.node.id); - encode_attributes(self.rbml_w, field.node.attrs.index(&FullRange)); + encode_attributes(self.rbml_w, &field.node.attrs[]); self.rbml_w.end_tag(); } } @@ -1941,13 +1941,13 @@ fn encode_misc_info(ecx: &EncodeContext, rbml_w.start_tag(tag_misc_info_crate_items); for item in krate.module.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(item.id)).index(&FullRange)); + rbml_w.wr_str(&def_to_string(local_def(item.id))[]); rbml_w.end_tag(); each_auxiliary_node_id(&**item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def( - auxiliary_node_id)).index(&FullRange)); + rbml_w.wr_str(&def_to_string(local_def( + auxiliary_node_id))[]); rbml_w.end_tag(); true }); @@ -2116,17 +2116,17 @@ struct Stats { let mut rbml_w = writer::Encoder::new(wr); - encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name.index(&FullRange)); + encode_crate_name(&mut rbml_w, &ecx.link_meta.crate_name[]); encode_crate_triple(&mut rbml_w, - tcx.sess + &tcx.sess .opts .target_triple - .index(&FullRange)); + []); encode_hash(&mut rbml_w, &ecx.link_meta.crate_hash); encode_dylib_dependency_formats(&mut rbml_w, &ecx); let mut i = rbml_w.writer.tell().unwrap(); - encode_attributes(&mut rbml_w, krate.attrs.index(&FullRange)); + encode_attributes(&mut rbml_w, 
&krate.attrs[]); stats.attr_bytes = rbml_w.writer.tell().unwrap() - i; i = rbml_w.writer.tell().unwrap(); diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 0fa9472287c..3a925aba0b7 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -315,14 +315,14 @@ pub fn report_load_errs(&mut self) { &Some(ref r) => format!("{} which `{}` depends on", message, r.ident) }; - self.sess.span_err(self.span, message.index(&FullRange)); + self.sess.span_err(self.span, &message[]); if self.rejected_via_triple.len() > 0 { let mismatches = self.rejected_via_triple.iter(); for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() { self.sess.fileline_note(self.span, - format!("crate `{}`, path #{}, triple {}: {}", - self.ident, i+1, got, path.display()).index(&FullRange)); + &format!("crate `{}`, path #{}, triple {}: {}", + self.ident, i+1, got, path.display())[]); } } if self.rejected_via_hash.len() > 0 { @@ -331,16 +331,16 @@ pub fn report_load_errs(&mut self) { let mismatches = self.rejected_via_hash.iter(); for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() { self.sess.fileline_note(self.span, - format!("crate `{}` path {}{}: {}", - self.ident, "#", i+1, path.display()).index(&FullRange)); + &format!("crate `{}` path {}{}: {}", + self.ident, "#", i+1, path.display())[]); } match self.root { &None => {} &Some(ref r) => { for (i, path) in r.paths().iter().enumerate() { self.sess.fileline_note(self.span, - format!("crate `{}` path #{}: {}", - r.ident, i+1, path.display()).index(&FullRange)); + &format!("crate `{}` path #{}: {}", + r.ident, i+1, path.display())[]); } } } @@ -386,7 +386,7 @@ fn find_library_crate(&mut self) -> Option { None => return FileDoesntMatch, Some(file) => file, }; - let (hash, rlib) = if file.starts_with(rlib_prefix.index(&FullRange)) && + let (hash, rlib) = if file.starts_with(&rlib_prefix[]) && file.ends_with(".rlib") { (file.slice(rlib_prefix.len(), file.len() - ".rlib".len()), true) @@ -445,27 +445,27 @@ fn find_library_crate(&mut self) -> Option { 1 => Some(libraries.into_iter().next().unwrap()), _ => { self.sess.span_err(self.span, - format!("multiple matching crates for `{}`", - self.crate_name).index(&FullRange)); + &format!("multiple matching crates for `{}`", + self.crate_name)[]); self.sess.note("candidates:"); for lib in libraries.iter() { match lib.dylib { Some(ref p) => { - self.sess.note(format!("path: {}", - p.display()).index(&FullRange)); + self.sess.note(&format!("path: {}", + p.display())[]); } None => {} } match lib.rlib { Some(ref p) => { - self.sess.note(format!("path: {}", - p.display()).index(&FullRange)); + self.sess.note(&format!("path: {}", + p.display())[]); } None => {} } let data = lib.metadata.as_slice(); let name = decoder::get_crate_name(data); - note_crate_name(self.sess.diagnostic(), name.index(&FullRange)); + note_crate_name(self.sess.diagnostic(), &name[]); } None } @@ -516,22 +516,22 @@ fn extract_one(&mut self, m: HashSet, flavor: &str, }; if ret.is_some() { self.sess.span_err(self.span, - format!("multiple {} candidates for `{}` \ + &format!("multiple {} candidates for `{}` \ found", flavor, - self.crate_name).index(&FullRange)); + self.crate_name)[]); self.sess.span_note(self.span, - format!(r"candidate #1: {}", + &format!(r"candidate #1: {}", ret.as_ref().unwrap() - .display()).index(&FullRange)); + .display())[]); error = 1; ret = None; } if error > 0 { error += 1; self.sess.span_note(self.span, - format!(r"candidate #{}: {}", error, - 
lib.display()).index(&FullRange)); + &format!(r"candidate #{}: {}", error, + lib.display())[]); continue } *slot = Some(metadata); @@ -606,17 +606,17 @@ fn find_commandline_library(&mut self) -> Option { let mut rlibs = HashSet::new(); let mut dylibs = HashSet::new(); { - let mut locs = locs.iter().map(|l| Path::new(l.index(&FullRange))).filter(|loc| { + let mut locs = locs.iter().map(|l| Path::new(&l[])).filter(|loc| { if !loc.exists() { - sess.err(format!("extern location for {} does not exist: {}", - self.crate_name, loc.display()).index(&FullRange)); + sess.err(&format!("extern location for {} does not exist: {}", + self.crate_name, loc.display())[]); return false; } let file = match loc.filename_str() { Some(file) => file, None => { - sess.err(format!("extern location for {} is not a file: {}", - self.crate_name, loc.display()).index(&FullRange)); + sess.err(&format!("extern location for {} is not a file: {}", + self.crate_name, loc.display())[]); return false; } }; @@ -624,13 +624,13 @@ fn find_commandline_library(&mut self) -> Option { return true } else { let (ref prefix, ref suffix) = dylibname; - if file.starts_with(prefix.index(&FullRange)) && - file.ends_with(suffix.index(&FullRange)) { + if file.starts_with(&prefix[]) && + file.ends_with(&suffix[]) { return true } } - sess.err(format!("extern location for {} is of an unknown type: {}", - self.crate_name, loc.display()).index(&FullRange)); + sess.err(&format!("extern location for {} is of an unknown type: {}", + self.crate_name, loc.display())[]); false }); @@ -663,7 +663,7 @@ fn find_commandline_library(&mut self) -> Option { } pub fn note_crate_name(diag: &SpanHandler, name: &str) { - diag.handler().note(format!("crate name: {}", name).index(&FullRange)); + diag.handler().note(&format!("crate name: {}", name)[]); } impl ArchiveMetadata { diff --git a/src/librustc/metadata/tydecode.rs b/src/librustc/metadata/tydecode.rs index a4304bf1e2d..5aacaa04e46 100644 --- a/src/librustc/metadata/tydecode.rs +++ b/src/librustc/metadata/tydecode.rs @@ -98,7 +98,7 @@ fn scan(st: &mut PState, mut is_last: F, op: G) -> R where } let end_pos = st.pos; st.pos += 1; - return op(st.data.index(&(start_pos..end_pos))); + return op(&st.data[start_pos..end_pos]); } pub fn parse_ident(st: &mut PState, last: char) -> ast::Ident { @@ -250,8 +250,8 @@ fn parse_trait_store_(st: &mut PState, conv: &mut F) -> ty::TraitStore where '~' => ty::UniqTraitStore, '&' => ty::RegionTraitStore(parse_region_(st, conv), parse_mutability(st)), c => { - st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'", - c).index(&FullRange)) + st.tcx.sess.bug(&format!("parse_trait_store(): bad input '{}'", + c)[]) } } } @@ -318,7 +318,7 @@ fn parse_bound_region_(st: &mut PState, conv: &mut F) -> ty::BoundRegion wher } '[' => { let def = parse_def_(st, RegionParameter, conv); - let ident = token::str_to_ident(parse_str(st, ']').index(&FullRange)); + let ident = token::str_to_ident(&parse_str(st, ']')[]); ty::BrNamed(def, ident.name) } 'f' => { @@ -357,7 +357,7 @@ fn parse_region_(st: &mut PState, conv: &mut F) -> ty::Region where assert_eq!(next(st), '|'); let index = parse_u32(st); assert_eq!(next(st), '|'); - let nm = token::str_to_ident(parse_str(st, ']').index(&FullRange)); + let nm = token::str_to_ident(&parse_str(st, ']')[]); ty::ReEarlyBound(node_id, space, index, nm.name) } 'f' => { @@ -481,7 +481,7 @@ fn parse_ty_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> Ty<'tcx> w assert_eq!(next(st), '|'); let space = parse_param_space(st); assert_eq!(next(st), 
'|'); - let name = token::intern(parse_str(st, ']').index(&FullRange)); + let name = token::intern(&parse_str(st, ']')[]); return ty::mk_param(tcx, space, index, name); } '~' => return ty::mk_uniq(tcx, parse_ty_(st, conv)), @@ -637,7 +637,7 @@ fn parse_abi_set(st: &mut PState) -> abi::Abi { assert_eq!(next(st), '['); scan(st, |c| c == ']', |bytes| { let abi_str = str::from_utf8(bytes).unwrap(); - abi::lookup(abi_str.index(&FullRange)).expect(abi_str) + abi::lookup(&abi_str[]).expect(abi_str) }) } @@ -733,8 +733,8 @@ pub fn parse_def_id(buf: &[u8]) -> ast::DefId { panic!(); } - let crate_part = buf.index(&(0u..colon_idx)); - let def_part = buf.index(&((colon_idx + 1u)..len)); + let crate_part = &buf[0u..colon_idx]; + let def_part = &buf[(colon_idx + 1u)..len]; let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| s.parse::()) { Some(cn) => cn as ast::CrateNum, diff --git a/src/librustc/middle/astconv_util.rs b/src/librustc/middle/astconv_util.rs index 955f522b804..8cd3795580e 100644 --- a/src/librustc/middle/astconv_util.rs +++ b/src/librustc/middle/astconv_util.rs @@ -47,8 +47,8 @@ pub fn ast_ty_to_prim_ty<'tcx>(tcx: &ty::ctxt<'tcx>, ast_ty: &ast::Ty) let a_def = match tcx.def_map.borrow().get(&id) { None => { tcx.sess.span_bug(ast_ty.span, - format!("unbound path {}", - path.repr(tcx)).index(&FullRange)) + &format!("unbound path {}", + path.repr(tcx))[]) } Some(&d) => d }; diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index 32ce131c57a..550c0f34caf 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -132,7 +132,7 @@ pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata, // Do an Option dance to use the path after it is moved below. let s = ast_map::path_to_string(ast_map::Values(path.iter())); path_as_str = Some(s); - path_as_str.as_ref().map(|x| x.index(&FullRange)) + path_as_str.as_ref().map(|x| &x[]) }); let mut ast_dsr = reader::Decoder::new(ast_doc); let from_id_range = Decodable::decode(&mut ast_dsr).unwrap(); @@ -1876,8 +1876,8 @@ fn decode_side_tables(dcx: &DecodeContext, match c::astencode_tag::from_uint(tag) { None => { dcx.tcx.sess.bug( - format!("unknown tag found in side tables: {:x}", - tag).index(&FullRange)); + &format!("unknown tag found in side tables: {:x}", + tag)[]); } Some(value) => { let val_doc = entry_doc.get(c::tag_table_val as uint); @@ -1961,8 +1961,8 @@ fn decode_side_tables(dcx: &DecodeContext, } _ => { dcx.tcx.sess.bug( - format!("unknown tag found in side tables: {:x}", - tag).index(&FullRange)); + &format!("unknown tag found in side tables: {:x}", + tag)[]); } } } diff --git a/src/librustc/middle/cfg/construct.rs b/src/librustc/middle/cfg/construct.rs index f7fc90bcef6..b601ea59486 100644 --- a/src/librustc/middle/cfg/construct.rs +++ b/src/librustc/middle/cfg/construct.rs @@ -362,7 +362,7 @@ fn expr(&mut self, expr: &ast::Expr, pred: CFGIndex) -> CFGIndex { let mut cond_exit = discr_exit; for arm in arms.iter() { cond_exit = self.add_dummy_node(&[cond_exit]); // 2 - let pats_exit = self.pats_any(arm.pats.index(&FullRange), + let pats_exit = self.pats_any(&arm.pats[], cond_exit); // 3 let guard_exit = self.opt_expr(&arm.guard, pats_exit); // 4 @@ -615,15 +615,15 @@ fn find_scope(&self, } self.tcx.sess.span_bug( expr.span, - format!("no loop scope for id {}", - loop_id).index(&FullRange)); + &format!("no loop scope for id {}", + loop_id)[]); } r => { self.tcx.sess.span_bug( expr.span, - format!("bad entry `{:?}` in def_map for label", - r).index(&FullRange)); + 
&format!("bad entry `{:?}` in def_map for label", + r)[]); } } } diff --git a/src/librustc/middle/cfg/graphviz.rs b/src/librustc/middle/cfg/graphviz.rs index 8b9a0d89b38..f4db2b6e61d 100644 --- a/src/librustc/middle/cfg/graphviz.rs +++ b/src/librustc/middle/cfg/graphviz.rs @@ -52,7 +52,7 @@ fn replace_newline_with_backslash_l(s: String) -> String { } impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> { - fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name.index(&FullRange)).unwrap() } + fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(&self.name[]).unwrap() } fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> { dot::Id::new(format!("N{}", i.node_id())).unwrap() @@ -85,9 +85,9 @@ fn edge_label(&self, e: &Edge<'a>) -> dot::LabelText<'a> { let s = self.ast_map.node_to_string(node_id); // left-aligns the lines let s = replace_newline_with_backslash_l(s); - label.push_str(format!("exiting scope_{} {}", + label.push_str(&format!("exiting scope_{} {}", i, - s.index(&FullRange)).index(&FullRange)); + &s[])[]); } dot::LabelText::EscStr(label.into_cow()) } diff --git a/src/librustc/middle/check_loop.rs b/src/librustc/middle/check_loop.rs index 5024e5c4f77..1f779acac25 100644 --- a/src/librustc/middle/check_loop.rs +++ b/src/librustc/middle/check_loop.rs @@ -74,11 +74,11 @@ fn require_loop(&self, name: &str, span: Span) { Loop => {} Closure => { self.sess.span_err(span, - format!("`{}` inside of a closure", name).index(&FullRange)); + &format!("`{}` inside of a closure", name)[]); } Normal => { self.sess.span_err(span, - format!("`{}` outside of loop", name).index(&FullRange)); + &format!("`{}` outside of loop", name)[]); } } } diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index f1edfb37273..43f39a67f5c 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -47,7 +47,7 @@ /// Pretty-printer for matrices of patterns, example: /// ++++++++++++++++++++++++++ -/// + _ + .index(&FullRange) + +/// + _ + [] + /// ++++++++++++++++++++++++++ /// + true + [First] + /// ++++++++++++++++++++++++++ @@ -161,7 +161,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { // First, check legality of move bindings. check_legality_of_move_bindings(cx, arm.guard.is_some(), - arm.pats.index(&FullRange)); + &arm.pats[]); // Second, if there is a guard on each arm, make sure it isn't // assigning or borrowing anything mutably. @@ -198,7 +198,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { } // Fourth, check for unreachable arms. - check_arms(cx, inlined_arms.index(&FullRange), source); + check_arms(cx, &inlined_arms[], source); // Finally, check if the whole match expression is exhaustive. // Check for empty enum, because is_useful only works on inhabited types. @@ -228,9 +228,9 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { is_refutable(cx, &*static_inliner.fold_pat((*pat).clone()), |uncovered_pat| { cx.tcx.sess.span_err( pat.span, - format!("refutable pattern in `for` loop binding: \ + &format!("refutable pattern in `for` loop binding: \ `{}` not covered", - pat_to_string(uncovered_pat)).index(&FullRange)); + pat_to_string(uncovered_pat))[]); }); // Check legality of move bindings. @@ -303,7 +303,7 @@ fn check_arms(cx: &MatchCheckCtxt, for pat in pats.iter() { let v = vec![&**pat]; - match is_useful(cx, &seen, v.index(&FullRange), LeaveOutWitness) { + match is_useful(cx, &seen, &v[], LeaveOutWitness) { NotUseful => { match source { ast::MatchSource::IfLetDesugar { .. 
} => { @@ -355,7 +355,7 @@ fn raw_pat<'a>(p: &'a Pat) -> &'a Pat { fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix) { match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) { UsefulWithWitness(pats) => { - let witness = match pats.index(&FullRange) { + let witness = match &pats[] { [ref witness] => &**witness, [] => DUMMY_WILD_PAT, _ => unreachable!() @@ -609,7 +609,7 @@ fn is_useful(cx: &MatchCheckCtxt, UsefulWithWitness(pats) => UsefulWithWitness({ let arity = constructor_arity(cx, &c, left_ty); let mut result = { - let pat_slice = pats.index(&FullRange); + let pat_slice = &pats[]; let subpats: Vec<_> = range(0, arity).map(|i| { pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p) }).collect(); @@ -656,10 +656,10 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix, witness: WitnessPreference) -> Usefulness { let arity = constructor_arity(cx, &ctor, lty); let matrix = Matrix(m.iter().filter_map(|r| { - specialize(cx, r.index(&FullRange), &ctor, 0u, arity) + specialize(cx, &r[], &ctor, 0u, arity) }).collect()); match specialize(cx, v, &ctor, 0u, arity) { - Some(v) => is_useful(cx, &matrix, v.index(&FullRange), witness), + Some(v) => is_useful(cx, &matrix, &v[], witness), None => NotUseful } } @@ -729,7 +729,7 @@ fn pat_constructors(cx: &MatchCheckCtxt, p: &Pat, /// This computes the arity of a constructor. The arity of a constructor /// is how many subpattern patterns of that constructor should be expanded to. /// -/// For instance, a tuple pattern (_, 42u, Some(.index(&FullRange))) has the arity of 3. +/// For instance, a tuple pattern (_, 42u, Some([])) has the arity of 3. /// A struct pattern's arity is the number of fields it contains, etc. pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> uint { match ty.sty { @@ -926,8 +926,8 @@ pub fn specialize<'a>(cx: &MatchCheckCtxt, r: &[&'a Pat], } }; head.map(|mut head| { - head.push_all(r.index(&(0..col))); - head.push_all(r.index(&((col + 1)..))); + head.push_all(&r[0..col]); + head.push_all(&r[(col + 1)..]); head }) } @@ -1041,10 +1041,10 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, _ => { cx.tcx.sess.span_bug( p.span, - format!("binding pattern {} is not an \ + &format!("binding pattern {} is not an \ identifier: {:?}", p.id, - p.node).index(&FullRange)); + p.node)[]); } } } diff --git a/src/librustc/middle/check_static.rs b/src/librustc/middle/check_static.rs index 994a2b0dc8a..154272d2deb 100644 --- a/src/librustc/middle/check_static.rs +++ b/src/librustc/middle/check_static.rs @@ -111,8 +111,8 @@ fn check_static_mut_type(&self, e: &ast::Expr) { return }; - self.tcx.sess.span_err(e.span, format!("mutable statics are not allowed \ - to have {}", suffix).index(&FullRange)); + self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \ + to have {}", suffix)[]); } fn check_static_type(&self, e: &ast::Expr) { @@ -169,8 +169,8 @@ fn visit_expr(&mut self, e: &ast::Expr) { ty::ty_struct(did, _) | ty::ty_enum(did, _) if ty::has_dtor(self.tcx, did) => { self.tcx.sess.span_err(e.span, - format!("{} are not allowed to have \ - destructors", self.msg()).index(&FullRange)) + &format!("{} are not allowed to have \ + destructors", self.msg())[]) } _ => {} } @@ -234,7 +234,7 @@ fn visit_expr(&mut self, e: &ast::Expr) { let msg = "constants cannot refer to other statics, \ insert an intermediate constant \ instead"; - self.tcx.sess.span_err(e.span, msg.index(&FullRange)); + self.tcx.sess.span_err(e.span, &msg[]); } _ => {} } diff --git 
a/src/librustc/middle/check_static_recursion.rs b/src/librustc/middle/check_static_recursion.rs index 75851f0a853..e2a0738def1 100644 --- a/src/librustc/middle/check_static_recursion.rs +++ b/src/librustc/middle/check_static_recursion.rs @@ -104,8 +104,8 @@ fn visit_expr(&mut self, e: &ast::Expr) { ast_map::NodeForeignItem(_) => {}, _ => { self.sess.span_err(e.span, - format!("expected item, found {}", - self.ast_map.node_to_string(def_id.node)).index(&FullRange)); + &format!("expected item, found {}", + self.ast_map.node_to_string(def_id.node))[]); return; }, } diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index e726993bd48..04d4b41b21a 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -48,7 +48,7 @@ // target uses". This _includes_ integer-constants, plus the following // constructors: // -// fixed-size vectors and strings: .index(&FullRange) and ""/_ +// fixed-size vectors and strings: [] and ""/_ // vector and string slices: &[] and &"" // tuples: (,) // enums: foo(...) @@ -117,7 +117,7 @@ fn variant_expr<'a>(variants: &'a [P], id: ast::NodeId) None => None, Some(ast_map::NodeItem(it)) => match it.node { ast::ItemEnum(ast::EnumDef { ref variants }, _) => { - variant_expr(variants.index(&FullRange), variant_def.node) + variant_expr(&variants[], variant_def.node) } _ => None }, @@ -138,7 +138,7 @@ fn variant_expr<'a>(variants: &'a [P], id: ast::NodeId) // NOTE this doesn't do the right thing, it compares inlined // NodeId's to the original variant_def's NodeId, but they // come from different crates, so they will likely never match. - variant_expr(variants.index(&FullRange), variant_def.node).map(|e| e.id) + variant_expr(&variants[], variant_def.node).map(|e| e.id) } _ => None }, @@ -364,7 +364,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P { pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val { match eval_const_expr_partial(tcx, e) { Ok(r) => r, - Err(s) => tcx.sess.span_fatal(e.span, s.index(&FullRange)) + Err(s) => tcx.sess.span_fatal(e.span, &s[]) } } diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index bdd98a94fc3..4ae0aa43406 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs @@ -312,7 +312,7 @@ pub fn each_bit_for_node(&self, e: EntryOrExit, cfgidx: CFGIndex, f: F) -> bo let mut t = on_entry.to_vec(); self.apply_gen_kill(cfgidx, t.as_mut_slice()); temp_bits = t; - temp_bits.index(&FullRange) + &temp_bits[] } }; debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}", @@ -421,7 +421,7 @@ pub fn add_kills_from_flow_exits(&mut self, cfg: &cfg::CFG) { let bits = self.kills.slice_mut(start, end); debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); - bits.clone_from_slice(orig_kills.index(&FullRange)); + bits.clone_from_slice(&orig_kills[]); debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); } @@ -554,7 +554,7 @@ fn bits_to_string(words: &[uint]) -> String { let mut v = word; for _ in range(0u, uint::BYTES) { result.push(sep); - result.push_str(format!("{:02x}", v & 0xFF).index(&FullRange)); + result.push_str(&format!("{:02x}", v & 0xFF)[]); v >>= 8; sep = '-'; } diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs index 0bc899a8a62..cca0b7d9ad0 100644 --- a/src/librustc/middle/dependency_format.rs +++ 
b/src/librustc/middle/dependency_format.rs @@ -117,8 +117,8 @@ fn calculate_type(sess: &session::Session, sess.cstore.iter_crate_data(|cnum, data| { let src = sess.cstore.get_used_crate_source(cnum).unwrap(); if src.rlib.is_some() { return } - sess.err(format!("dependency `{}` not found in rlib format", - data.name).index(&FullRange)); + sess.err(&format!("dependency `{}` not found in rlib format", + data.name)[]); }); return Vec::new(); } @@ -191,13 +191,13 @@ fn calculate_type(sess: &session::Session, Some(cstore::RequireDynamic) if src.dylib.is_some() => continue, Some(kind) => { let data = sess.cstore.get_crate_data(cnum + 1); - sess.err(format!("crate `{}` required to be available in {}, \ + sess.err(&format!("crate `{}` required to be available in {}, \ but it was not available in this form", data.name, match kind { cstore::RequireStatic => "rlib", cstore::RequireDynamic => "dylib", - }).index(&FullRange)); + })[]); } } } @@ -220,9 +220,9 @@ fn add_library(sess: &session::Session, // can be refined over time. if link2 != link || link == cstore::RequireStatic { let data = sess.cstore.get_crate_data(cnum); - sess.err(format!("cannot satisfy dependencies so `{}` only \ + sess.err(&format!("cannot satisfy dependencies so `{}` only \ shows up once", - data.name).index(&FullRange)); + data.name)[]); sess.help("having upstream crates all available in one format \ will likely make this go away"); } diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 45838436e60..e5eb439d42c 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -857,8 +857,8 @@ fn walk_autoderefs(&mut self, let (m, r) = match self_ty.sty { ty::ty_rptr(r, ref m) => (m.mutbl, r), _ => self.tcx().sess.span_bug(expr.span, - format!("bad overloaded deref type {}", - method_ty.repr(self.tcx())).index(&FullRange)) + &format!("bad overloaded deref type {}", + method_ty.repr(self.tcx()))[]) }; let bk = ty::BorrowKind::from_mutbl(m); self.delegate.borrow(expr.id, expr.span, cmt, @@ -1180,7 +1180,7 @@ fn walk_pat(&mut self, let msg = format!("Pattern has unexpected def: {:?} and type {}", def, cmt_pat.ty.repr(tcx)); - tcx.sess.span_bug(pat.span, msg.index(&FullRange)) + tcx.sess.span_bug(pat.span, &msg[]) } } } diff --git a/src/librustc/middle/infer/combine.rs b/src/librustc/middle/infer/combine.rs index 22975f54a9f..05f1da78848 100644 --- a/src/librustc/middle/infer/combine.rs +++ b/src/librustc/middle/infer/combine.rs @@ -142,7 +142,7 @@ fn substs_variances(&self, for _ in a_regions.iter() { invariance.push(ty::Invariant); } - invariance.index(&FullRange) + &invariance[] } }; @@ -477,10 +477,10 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C, (&ty::ty_infer(TyVar(_)), _) | (_, &ty::ty_infer(TyVar(_))) => { tcx.sess.bug( - format!("{}: bot and var types should have been handled ({},{})", + &format!("{}: bot and var types should have been handled ({},{})", this.tag(), a.repr(this.infcx().tcx), - b.repr(this.infcx().tcx)).index(&FullRange)); + b.repr(this.infcx().tcx))[]); } (&ty::ty_err, _) | (_, &ty::ty_err) => { @@ -855,8 +855,8 @@ fn fold_region(&mut self, r: ty::Region) -> ty::Region { ty::ReEarlyBound(..) 
=> { self.tcx().sess.span_bug( self.span, - format!("Encountered early bound region when generalizing: {}", - r.repr(self.tcx())).index(&FullRange)); + &format!("Encountered early bound region when generalizing: {}", + r.repr(self.tcx()))[]); } // Always make a fresh region variable for skolemized regions; diff --git a/src/librustc/middle/infer/error_reporting.rs b/src/librustc/middle/infer/error_reporting.rs index 762617fb49b..bbd12c9671d 100644 --- a/src/librustc/middle/infer/error_reporting.rs +++ b/src/librustc/middle/infer/error_reporting.rs @@ -200,9 +200,9 @@ fn report_region_errors(&self, ref trace_origins, ref same_regions) => { if !same_regions.is_empty() { - self.report_processed_errors(var_origins.index(&FullRange), - trace_origins.index(&FullRange), - same_regions.index(&FullRange)); + self.report_processed_errors(&var_origins[], + &trace_origins[], + &same_regions[]); } } } @@ -373,10 +373,10 @@ fn report_type_error(&self, trace: TypeTrace<'tcx>, terr: &ty::type_err<'tcx>) { self.tcx.sess.span_err( trace.origin.span(), - format!("{}: {} ({})", + &format!("{}: {} ({})", message_root_str, expected_found_str, - ty::type_err_to_str(self.tcx, terr)).index(&FullRange)); + ty::type_err_to_str(self.tcx, terr))[]); match trace.origin { infer::MatchExpressionArm(_, arm_span) => @@ -445,42 +445,42 @@ fn report_generic_bound_failure(&self, // Does the required lifetime have a nice name we can print? self.tcx.sess.span_err( origin.span(), - format!("{} may not live long enough", labeled_user_string).index(&FullRange)); + &format!("{} may not live long enough", labeled_user_string)[]); self.tcx.sess.span_help( origin.span(), - format!( + &format!( "consider adding an explicit lifetime bound `{}: {}`...", bound_kind.user_string(self.tcx), - sub.user_string(self.tcx)).index(&FullRange)); + sub.user_string(self.tcx))[]); } ty::ReStatic => { // Does the required lifetime have a nice name we can print? self.tcx.sess.span_err( origin.span(), - format!("{} may not live long enough", labeled_user_string).index(&FullRange)); + &format!("{} may not live long enough", labeled_user_string)[]); self.tcx.sess.span_help( origin.span(), - format!( + &format!( "consider adding an explicit lifetime bound `{}: 'static`...", - bound_kind.user_string(self.tcx)).index(&FullRange)); + bound_kind.user_string(self.tcx))[]); } _ => { // If not, be less specific. 
self.tcx.sess.span_err( origin.span(), - format!( + &format!( "{} may not live long enough", - labeled_user_string).index(&FullRange)); + labeled_user_string)[]); self.tcx.sess.span_help( origin.span(), - format!( + &format!( "consider adding an explicit lifetime bound for `{}`", - bound_kind.user_string(self.tcx)).index(&FullRange)); + bound_kind.user_string(self.tcx))[]); note_and_explain_region( self.tcx, - format!("{} must be valid for ", labeled_user_string).index(&FullRange), + &format!("{} must be valid for ", labeled_user_string)[], sub, "..."); } @@ -517,12 +517,12 @@ fn report_concrete_failure(&self, infer::ReborrowUpvar(span, ref upvar_id) => { self.tcx.sess.span_err( span, - format!("lifetime of borrowed pointer outlives \ + &format!("lifetime of borrowed pointer outlives \ lifetime of captured variable `{}`...", ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string()).index(&FullRange)); + .to_string())[]); note_and_explain_region( self.tcx, "...the borrowed pointer is valid for ", @@ -530,11 +530,11 @@ fn report_concrete_failure(&self, "..."); note_and_explain_region( self.tcx, - format!("...but `{}` is only valid for ", + &format!("...but `{}` is only valid for ", ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string()).index(&FullRange), + .to_string())[], sup, ""); } @@ -576,11 +576,11 @@ fn report_concrete_failure(&self, infer::FreeVariable(span, id) => { self.tcx.sess.span_err( span, - format!("captured variable `{}` does not \ + &format!("captured variable `{}` does not \ outlive the enclosing closure", ty::local_var_name_str(self.tcx, id).get() - .to_string()).index(&FullRange)); + .to_string())[]); note_and_explain_region( self.tcx, "captured variable is valid for ", @@ -620,9 +620,9 @@ fn report_concrete_failure(&self, infer::RelateParamBound(span, ty) => { self.tcx.sess.span_err( span, - format!("the type `{}` does not fulfill the \ + &format!("the type `{}` does not fulfill the \ required lifetime", - self.ty_to_string(ty)).index(&FullRange)); + self.ty_to_string(ty))[]); note_and_explain_region(self.tcx, "type must outlive ", sub, @@ -646,9 +646,9 @@ fn report_concrete_failure(&self, infer::RelateDefaultParamBound(span, ty) => { self.tcx.sess.span_err( span, - format!("the type `{}` (provided as the value of \ + &format!("the type `{}` (provided as the value of \ a type parameter) is not valid at this point", - self.ty_to_string(ty)).index(&FullRange)); + self.ty_to_string(ty))[]); note_and_explain_region(self.tcx, "type must outlive ", sub, @@ -712,9 +712,9 @@ fn report_concrete_failure(&self, infer::ExprTypeIsNotInScope(t, span) => { self.tcx.sess.span_err( span, - format!("type of expression contains references \ + &format!("type of expression contains references \ that are not valid during the expression: `{}`", - self.ty_to_string(t)).index(&FullRange)); + self.ty_to_string(t))[]); note_and_explain_region( self.tcx, "type is only valid for ", @@ -734,9 +734,9 @@ fn report_concrete_failure(&self, infer::ReferenceOutlivesReferent(ty, span) => { self.tcx.sess.span_err( span, - format!("in type `{}`, reference has a longer lifetime \ + &format!("in type `{}`, reference has a longer lifetime \ than the data it references", - self.ty_to_string(ty)).index(&FullRange)); + self.ty_to_string(ty))[]); note_and_explain_region( self.tcx, "the pointer is valid for ", @@ -861,7 +861,7 @@ fn give_suggestion(&self, same_regions: &[SameRegions]) { let (fn_decl, generics, unsafety, ident, expl_self, span) = node_inner.expect("expect item fn"); 
let taken = lifetimes_in_scope(self.tcx, scope_id); - let life_giver = LifeGiver::with_taken(taken.index(&FullRange)); + let life_giver = LifeGiver::with_taken(&taken[]); let rebuilder = Rebuilder::new(self.tcx, fn_decl, expl_self, generics, same_regions, &life_giver); let (fn_decl, expl_self, generics) = rebuilder.rebuild(); @@ -937,7 +937,7 @@ fn rebuild(&self) } expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime, &anon_nums, ®ion_names); - inputs = self.rebuild_args_ty(inputs.index(&FullRange), lifetime, + inputs = self.rebuild_args_ty(&inputs[], lifetime, &anon_nums, ®ion_names); output = self.rebuild_output(&output, lifetime, &anon_nums, ®ion_names); ty_params = self.rebuild_ty_params(ty_params, lifetime, @@ -972,7 +972,7 @@ fn pick_lifetime(&self, names.push(lt_name); } names.sort(); - let name = token::str_to_ident(names[0].index(&FullRange)).name; + let name = token::str_to_ident(&names[0][]).name; return (name_to_dummy_lifetime(name), Kept); } return (self.life_giver.give_lifetime(), Fresh); @@ -1220,9 +1220,9 @@ fn rebuild_arg_ty_or_output(&self, None => { self.tcx .sess - .fatal(format!( + .fatal(&format!( "unbound path {}", - pprust::path_to_string(path)).index(&FullRange)) + pprust::path_to_string(path))[]) } Some(&d) => d }; @@ -1420,7 +1420,7 @@ fn give_expl_lifetime_param(&self, opt_explicit_self, generics); let msg = format!("consider using an explicit lifetime \ parameter as shown: {}", suggested_fn); - self.tcx.sess.span_help(span, msg.index(&FullRange)); + self.tcx.sess.span_help(span, &msg[]); } fn report_inference_failure(&self, @@ -1461,9 +1461,9 @@ fn report_inference_failure(&self, self.tcx.sess.span_err( var_origin.span(), - format!("cannot infer an appropriate lifetime{} \ + &format!("cannot infer an appropriate lifetime{} \ due to conflicting requirements", - var_description).index(&FullRange)); + var_description)[]); } fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) { @@ -1510,8 +1510,8 @@ fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) { Some(values_str) => { self.tcx.sess.span_note( trace.origin.span(), - format!("...so that {} ({})", - desc, values_str).index(&FullRange)); + &format!("...so that {} ({})", + desc, values_str)[]); } None => { // Really should avoid printing this error at @@ -1520,7 +1520,7 @@ fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) { // doing right now. 
- nmatsakis self.tcx.sess.span_note( trace.origin.span(), - format!("...so that {}", desc).index(&FullRange)); + &format!("...so that {}", desc)[]); } } } @@ -1533,11 +1533,11 @@ fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) { infer::ReborrowUpvar(span, ref upvar_id) => { self.tcx.sess.span_note( span, - format!( + &format!( "...so that closure can access `{}`", ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string()).index(&FullRange)) + .to_string())[]) } infer::InfStackClosure(span) => { self.tcx.sess.span_note( @@ -1558,11 +1558,11 @@ fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) { infer::FreeVariable(span, id) => { self.tcx.sess.span_note( span, - format!("...so that captured variable `{}` \ + &format!("...so that captured variable `{}` \ does not outlive the enclosing closure", ty::local_var_name_str( self.tcx, - id).get().to_string()).index(&FullRange)); + id).get().to_string())[]); } infer::IndexSlice(span) => { self.tcx.sess.span_note( @@ -1604,9 +1604,9 @@ fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) { infer::ExprTypeIsNotInScope(t, span) => { self.tcx.sess.span_note( span, - format!("...so type `{}` of expression is valid during the \ + &format!("...so type `{}` of expression is valid during the \ expression", - self.ty_to_string(t)).index(&FullRange)); + self.ty_to_string(t))[]); } infer::BindingTypeIsNotValidAtDecl(span) => { self.tcx.sess.span_note( @@ -1616,30 +1616,30 @@ fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) { infer::ReferenceOutlivesReferent(ty, span) => { self.tcx.sess.span_note( span, - format!("...so that the reference type `{}` \ + &format!("...so that the reference type `{}` \ does not outlive the data it points at", - self.ty_to_string(ty)).index(&FullRange)); + self.ty_to_string(ty))[]); } infer::RelateParamBound(span, t) => { self.tcx.sess.span_note( span, - format!("...so that the type `{}` \ + &format!("...so that the type `{}` \ will meet the declared lifetime bounds", - self.ty_to_string(t)).index(&FullRange)); + self.ty_to_string(t))[]); } infer::RelateDefaultParamBound(span, t) => { self.tcx.sess.span_note( span, - format!("...so that type parameter \ + &format!("...so that type parameter \ instantiated with `{}`, \ will meet its declared lifetime bounds", - self.ty_to_string(t)).index(&FullRange)); + self.ty_to_string(t))[]); } infer::RelateRegionParamBound(span) => { self.tcx.sess.span_note( span, - format!("...so that the declared lifetime parameter bounds \ - are satisfied").index(&FullRange)); + &format!("...so that the declared lifetime parameter bounds \ + are satisfied")[]); } } } @@ -1691,7 +1691,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt, Some(node) => match node { ast_map::NodeItem(item) => match item.node { ast::ItemFn(_, _, _, ref gen, _) => { - taken.push_all(gen.lifetimes.index(&FullRange)); + taken.push_all(&gen.lifetimes[]); None }, _ => None @@ -1699,7 +1699,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt, ast_map::NodeImplItem(ii) => { match *ii { ast::MethodImplItem(ref m) => { - taken.push_all(m.pe_generics().lifetimes.index(&FullRange)); + taken.push_all(&m.pe_generics().lifetimes[]); Some(m.id) } ast::TypeImplItem(_) => None, @@ -1758,10 +1758,10 @@ fn give_lifetime(&self) -> ast::Lifetime { let mut lifetime; loop { let mut s = String::from_str("'"); - s.push_str(num_to_string(self.counter.get()).index(&FullRange)); + s.push_str(&num_to_string(self.counter.get())[]); if !self.taken.contains(&s) { lifetime = name_to_dummy_lifetime( - 
token::str_to_ident(s.index(&FullRange)).name); + token::str_to_ident(&s[]).name); self.generated.borrow_mut().push(lifetime); break; } diff --git a/src/librustc/middle/infer/higher_ranked/mod.rs b/src/librustc/middle/infer/higher_ranked/mod.rs index 073052dd368..5d3e2dc9c93 100644 --- a/src/librustc/middle/infer/higher_ranked/mod.rs +++ b/src/librustc/middle/infer/higher_ranked/mod.rs @@ -187,9 +187,9 @@ fn generalize_region(infcx: &InferCtxt, infcx.tcx.sess.span_bug( span, - format!("region {:?} is not associated with \ + &format!("region {:?} is not associated with \ any bound region from A!", - r0).index(&FullRange)) + r0)[]) } } @@ -322,7 +322,7 @@ fn rev_lookup(infcx: &InferCtxt, } infcx.tcx.sess.span_bug( span, - format!("could not find original bound region for {:?}", r).index(&FullRange)); + &format!("could not find original bound region for {:?}", r)[]); } fn fresh_bound_variable(infcx: &InferCtxt, debruijn: ty::DebruijnIndex) -> ty::Region { @@ -339,7 +339,7 @@ fn var_ids<'tcx, T: Combine<'tcx>>(combiner: &T, r => { combiner.infcx().tcx.sess.span_bug( combiner.trace().origin.span(), - format!("found non-region-vid: {:?}", r).index(&FullRange)); + &format!("found non-region-vid: {:?}", r)[]); } }).collect() } diff --git a/src/librustc/middle/infer/mod.rs b/src/librustc/middle/infer/mod.rs index 3f18af3d768..c92168b070e 100644 --- a/src/librustc/middle/infer/mod.rs +++ b/src/librustc/middle/infer/mod.rs @@ -1000,9 +1000,9 @@ pub fn type_error_message_str_with_expected(&self, format!(" ({})", ty::type_err_to_str(self.tcx, t_err)) }); - self.tcx.sess.span_err(sp, format!("{}{}", + self.tcx.sess.span_err(sp, &format!("{}{}", mk_msg(resolved_expected.map(|t| self.ty_to_string(t)), actual_ty), - error_str).index(&FullRange)); + error_str)[]); for err in err.iter() { ty::note_and_explain_type_err(self.tcx, *err) diff --git a/src/librustc/middle/infer/region_inference/mod.rs b/src/librustc/middle/infer/region_inference/mod.rs index 5c5c08b53a2..d54d0ae87ae 100644 --- a/src/librustc/middle/infer/region_inference/mod.rs +++ b/src/librustc/middle/infer/region_inference/mod.rs @@ -473,9 +473,9 @@ pub fn make_subregion(&self, (_, ReLateBound(..)) => { self.tcx.sess.span_bug( origin.span(), - format!("cannot relate bound region: {} <= {}", + &format!("cannot relate bound region: {} <= {}", sub.repr(self.tcx), - sup.repr(self.tcx)).index(&FullRange)); + sup.repr(self.tcx))[]); } (_, ReStatic) => { // all regions are subregions of static, so we can ignore this @@ -734,9 +734,9 @@ fn lub_concrete_regions(&self, a: Region, b: Region) -> Region { (ReEarlyBound(..), _) | (_, ReEarlyBound(..)) => { self.tcx.sess.bug( - format!("cannot relate bound region: LUB({}, {})", + &format!("cannot relate bound region: LUB({}, {})", a.repr(self.tcx), - b.repr(self.tcx)).index(&FullRange)); + b.repr(self.tcx))[]); } (ReStatic, _) | (_, ReStatic) => { @@ -750,10 +750,10 @@ fn lub_concrete_regions(&self, a: Region, b: Region) -> Region { (ReInfer(ReVar(v_id)), _) | (_, ReInfer(ReVar(v_id))) => { self.tcx.sess.span_bug( (*self.var_origins.borrow())[v_id.index as uint].span(), - format!("lub_concrete_regions invoked with \ + &format!("lub_concrete_regions invoked with \ non-concrete regions: {:?}, {:?}", a, - b).index(&FullRange)); + b)[]); } (ReFree(ref fr), ReScope(s_id)) | @@ -834,9 +834,9 @@ fn glb_concrete_regions(&self, (ReEarlyBound(..), _) | (_, ReEarlyBound(..)) => { self.tcx.sess.bug( - format!("cannot relate bound region: GLB({}, {})", + &format!("cannot relate bound region: GLB({}, {})", 
a.repr(self.tcx), - b.repr(self.tcx)).index(&FullRange)); + b.repr(self.tcx))[]); } (ReStatic, r) | (r, ReStatic) => { @@ -853,10 +853,10 @@ fn glb_concrete_regions(&self, (_, ReInfer(ReVar(v_id))) => { self.tcx.sess.span_bug( (*self.var_origins.borrow())[v_id.index as uint].span(), - format!("glb_concrete_regions invoked with \ + &format!("glb_concrete_regions invoked with \ non-concrete regions: {:?}, {:?}", a, - b).index(&FullRange)); + b)[]); } (ReFree(ref fr), ReScope(s_id)) | @@ -977,7 +977,7 @@ fn infer_variable_values(&self, self.expansion(var_data.as_mut_slice()); self.contraction(var_data.as_mut_slice()); let values = - self.extract_values_and_collect_conflicts(var_data.index(&FullRange), + self.extract_values_and_collect_conflicts(&var_data[], errors); self.collect_concrete_region_errors(&values, errors); values @@ -1411,11 +1411,11 @@ fn free_regions_first(a: &RegionAndOrigin, self.tcx.sess.span_bug( (*self.var_origins.borrow())[node_idx.index as uint].span(), - format!("collect_error_for_expanding_node() could not find error \ + &format!("collect_error_for_expanding_node() could not find error \ for var {:?}, lower_bounds={}, upper_bounds={}", node_idx, lower_bounds.repr(self.tcx), - upper_bounds.repr(self.tcx)).index(&FullRange)); + upper_bounds.repr(self.tcx))[]); } fn collect_error_for_contracting_node( @@ -1456,10 +1456,10 @@ fn collect_error_for_contracting_node( self.tcx.sess.span_bug( (*self.var_origins.borrow())[node_idx.index as uint].span(), - format!("collect_error_for_contracting_node() could not find error \ + &format!("collect_error_for_contracting_node() could not find error \ for var {:?}, upper_bounds={}", node_idx, - upper_bounds.repr(self.tcx)).index(&FullRange)); + upper_bounds.repr(self.tcx))[]); } fn collect_concrete_regions(&self, diff --git a/src/librustc/middle/infer/resolve.rs b/src/librustc/middle/infer/resolve.rs index 9035d72e9a2..7bb3106b0ba 100644 --- a/src/librustc/middle/infer/resolve.rs +++ b/src/librustc/middle/infer/resolve.rs @@ -95,8 +95,8 @@ fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { } ty::ty_infer(_) => { self.infcx.tcx.sess.bug( - format!("Unexpected type in full type resolver: {}", - t.repr(self.infcx.tcx)).index(&FullRange)); + &format!("Unexpected type in full type resolver: {}", + t.repr(self.infcx.tcx))[]); } _ => { ty_fold::super_fold_ty(self, t) diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 850033b3ed1..1b1dca00422 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -326,8 +326,8 @@ fn variable(&self, node_id: NodeId, span: Span) -> Variable { None => { self.tcx .sess - .span_bug(span, format!("no variable registered for id {}", - node_id).index(&FullRange)); + .span_bug(span, &format!("no variable registered for id {}", + node_id)[]); } } } @@ -597,8 +597,8 @@ fn live_node(&self, node_id: NodeId, span: Span) -> LiveNode { // creating liveness nodes for. 
self.ir.tcx.sess.span_bug( span, - format!("no live node registered for node {}", - node_id).index(&FullRange)); + &format!("no live node registered for node {}", + node_id)[]); } } } @@ -1133,7 +1133,7 @@ fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode) // Uninteresting cases: just propagate in rev exec order ast::ExprVec(ref exprs) => { - self.propagate_through_exprs(exprs.index(&FullRange), succ) + self.propagate_through_exprs(&exprs[], succ) } ast::ExprRepeat(ref element, ref count) => { @@ -1157,7 +1157,7 @@ fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode) } else { succ }; - let succ = self.propagate_through_exprs(args.index(&FullRange), succ); + let succ = self.propagate_through_exprs(&args[], succ); self.propagate_through_expr(&**f, succ) } @@ -1170,11 +1170,11 @@ fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode) } else { succ }; - self.propagate_through_exprs(args.index(&FullRange), succ) + self.propagate_through_exprs(&args[], succ) } ast::ExprTup(ref exprs) => { - self.propagate_through_exprs(exprs.index(&FullRange), succ) + self.propagate_through_exprs(&exprs[], succ) } ast::ExprBinary(op, ref l, ref r) if ast_util::lazy_binop(op) => { diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index b29c24c5861..fb9a16f86e5 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -584,9 +584,9 @@ pub fn cat_def(&self, _ => { self.tcx().sess.span_bug( span, - format!("Upvar of non-closure {} - {}", + &format!("Upvar of non-closure {} - {}", fn_node_id, - ty.repr(self.tcx())).index(&FullRange)); + ty.repr(self.tcx()))[]); } } } diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index 861c4a2c85e..aa37c2fe348 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -615,10 +615,10 @@ fn report_error(&self, result: CheckResult) -> bool { match result { None => true, Some((span, msg, note)) => { - self.tcx.sess.span_err(span, msg.index(&FullRange)); + self.tcx.sess.span_err(span, &msg[]); match note { Some((span, msg)) => { - self.tcx.sess.span_note(span, msg.index(&FullRange)) + self.tcx.sess.span_note(span, &msg[]) } None => {}, } @@ -720,7 +720,7 @@ fn check_field(&mut self, UnnamedField(idx) => format!("field #{} of {} is private", idx + 1, struct_desc), }; - self.tcx.sess.span_err(span, msg.index(&FullRange)); + self.tcx.sess.span_err(span, &msg[]); } // Given the ID of a method, checks to ensure it's in scope. @@ -741,8 +741,8 @@ fn check_static_method(&mut self, self.report_error(self.ensure_public(span, method_id, None, - format!("method `{}`", - string).index(&FullRange))); + &format!("method `{}`", + string)[])); } // Checks that a path is in scope. @@ -756,7 +756,7 @@ fn check_path(&mut self, span: Span, path_id: ast::NodeId, path: &ast::Path) { self.ensure_public(span, def, Some(origdid), - format!("{} `{}`", tyname, name).index(&FullRange)) + &format!("{} `{}`", tyname, name)[]) }; match self.last_private_map[path_id] { diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index 51602e88f93..906607ddc5b 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -50,7 +50,7 @@ fn generics_require_inlining(generics: &ast::Generics) -> bool { // monomorphized or it was marked with `#[inline]`. This will only return // true for functions. 
fn item_might_be_inlined(item: &ast::Item) -> bool { - if attributes_specify_inlining(item.attrs.index(&FullRange)) { + if attributes_specify_inlining(&item.attrs[]) { return true } @@ -65,7 +65,7 @@ fn item_might_be_inlined(item: &ast::Item) -> bool { fn method_might_be_inlined(tcx: &ty::ctxt, method: &ast::Method, impl_src: ast::DefId) -> bool { - if attributes_specify_inlining(method.attrs.index(&FullRange)) || + if attributes_specify_inlining(&method.attrs[]) || generics_require_inlining(method.pe_generics()) { return true } @@ -202,7 +202,7 @@ fn def_id_represents_local_inlined_item(&self, def_id: ast::DefId) -> bool { ast::MethodImplItem(ref method) => { if generics_require_inlining(method.pe_generics()) || attributes_specify_inlining( - method.attrs.index(&FullRange)) { + &method.attrs[]) { true } else { let impl_did = self.tcx @@ -247,9 +247,9 @@ fn propagate(&mut self) { Some(ref item) => self.propagate_node(item, search_item), None if search_item == ast::CRATE_NODE_ID => {} None => { - self.tcx.sess.bug(format!("found unmapped ID in worklist: \ + self.tcx.sess.bug(&format!("found unmapped ID in worklist: \ {}", - search_item).index(&FullRange)) + search_item)[]) } } } @@ -338,10 +338,10 @@ fn propagate_node(&mut self, node: &ast_map::Node, _ => { self.tcx .sess - .bug(format!("found unexpected thingy in worklist: {}", + .bug(&format!("found unexpected thingy in worklist: {}", self.tcx .map - .node_to_string(search_item)).index(&FullRange)) + .node_to_string(search_item))[]) } } } diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 5d18843097f..5d33a7efd3b 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -643,7 +643,7 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) { // A, but the inner rvalues `a()` and `b()` have an extended lifetime // due to rule C. // - // FIXME(#6308) -- Note that `.index(&FullRange)` patterns work more smoothly post-DST. + // FIXME(#6308) -- Note that `[]` patterns work more smoothly post-DST. 
match local.init { Some(ref expr) => { diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 8e03d774b81..b670099ff96 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -398,8 +398,8 @@ fn resolve_free_lifetime_ref(&mut self, fn unresolved_lifetime_ref(&self, lifetime_ref: &ast::Lifetime) { self.sess.span_err( lifetime_ref.span, - format!("use of undeclared lifetime name `{}`", - token::get_name(lifetime_ref.name)).index(&FullRange)); + &format!("use of undeclared lifetime name `{}`", + token::get_name(lifetime_ref.name))[]); } fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec) { @@ -411,9 +411,9 @@ fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec ty::Region { let span = self.span.unwrap_or(DUMMY_SP); self.tcx().sess.span_bug( span, - format!("Type parameter out of range \ + &format!("Type parameter out of range \ when substituting in region {} (root type={}) \ (space={:?}, index={})", region_name.as_str(), self.root_ty.repr(self.tcx()), - space, i).index(&FullRange)); + space, i)[]); } } } @@ -654,14 +654,14 @@ fn ty_for_param(&self, p: ty::ParamTy, source_ty: Ty<'tcx>) -> Ty<'tcx> { let span = self.span.unwrap_or(DUMMY_SP); self.tcx().sess.span_bug( span, - format!("Type parameter `{}` ({}/{:?}/{}) out of range \ + &format!("Type parameter `{}` ({}/{:?}/{}) out of range \ when substituting (root type={}) substs={}", p.repr(self.tcx()), source_ty.repr(self.tcx()), p.space, p.idx, self.root_ty.repr(self.tcx()), - self.substs.repr(self.tcx())).index(&FullRange)); + self.substs.repr(self.tcx()))[]); } }; diff --git a/src/librustc/middle/traits/coherence.rs b/src/librustc/middle/traits/coherence.rs index 49c7d6aafaa..489731e7554 100644 --- a/src/librustc/middle/traits/coherence.rs +++ b/src/librustc/middle/traits/coherence.rs @@ -136,8 +136,8 @@ fn ty_is_local_constructor<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool { ty::ty_open(..) | ty::ty_err => { tcx.sess.bug( - format!("ty_is_local invoked on unexpected type: {}", - ty.repr(tcx)).index(&FullRange)) + &format!("ty_is_local invoked on unexpected type: {}", + ty.repr(tcx))[]) } } } diff --git a/src/librustc/middle/traits/error_reporting.rs b/src/librustc/middle/traits/error_reporting.rs index fd6773afb76..02c913a9e81 100644 --- a/src/librustc/middle/traits/error_reporting.rs +++ b/src/librustc/middle/traits/error_reporting.rs @@ -337,7 +337,7 @@ pub fn suggest_new_overflow_limit(tcx: &ty::ctxt, span: Span) { let suggested_limit = current_limit * 2; tcx.sess.span_note( span, - format!( + &format!( "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", - suggested_limit).index(&FullRange)); + suggested_limit)[]); } diff --git a/src/librustc/middle/traits/fulfill.rs b/src/librustc/middle/traits/fulfill.rs index 71a3ad64faf..c3b9be85eb5 100644 --- a/src/librustc/middle/traits/fulfill.rs +++ b/src/librustc/middle/traits/fulfill.rs @@ -227,7 +227,7 @@ pub fn select_where_possible<'a>(&mut self, } pub fn pending_obligations(&self) -> &[PredicateObligation<'tcx>] { - self.predicates.index(&FullRange) + &self.predicates[] } /// Attempts to select obligations using `selcx`. 
If `only_new_obligations` is true, then it diff --git a/src/librustc/middle/traits/object_safety.rs b/src/librustc/middle/traits/object_safety.rs index aaf0d4fcb33..c0399112c33 100644 --- a/src/librustc/middle/traits/object_safety.rs +++ b/src/librustc/middle/traits/object_safety.rs @@ -178,7 +178,7 @@ fn object_safety_violations_for_method<'tcx>(tcx: &ty::ctxt<'tcx>, // The `Self` type is erased, so it should not appear in list of // arguments or return type apart from the receiver. let ref sig = method.fty.sig; - for &input_ty in sig.0.inputs.index(&(1..)).iter() { + for &input_ty in sig.0.inputs[1..].iter() { if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) { return Some(MethodViolationCode::ReferencesSelf); } diff --git a/src/librustc/middle/traits/select.rs b/src/librustc/middle/traits/select.rs index 7c8e0d90e11..f38b1ee975c 100644 --- a/src/librustc/middle/traits/select.rs +++ b/src/librustc/middle/traits/select.rs @@ -903,7 +903,7 @@ fn assemble_candidates_from_caller_bounds(&mut self, let all_bounds = util::transitive_bounds( - self.tcx(), caller_trait_refs.index(&FullRange)); + self.tcx(), &caller_trait_refs[]); let matching_bounds = all_bounds.filter( @@ -1465,9 +1465,9 @@ fn builtin_bound(&mut self, ty::ty_infer(ty::FreshTy(_)) | ty::ty_infer(ty::FreshIntTy(_)) => { self.tcx().sess.bug( - format!( + &format!( "asked to assemble builtin bounds of unexpected type: {}", - self_ty.repr(self.tcx())).index(&FullRange)); + self_ty.repr(self.tcx()))[]); } }; @@ -1636,8 +1636,8 @@ fn confirm_builtin_candidate(&mut self, AmbiguousBuiltin | ParameterBuiltin => { self.tcx().sess.span_bug( obligation.cause.span, - format!("builtin bound for {} was ambig", - obligation.repr(self.tcx())).index(&FullRange)); + &format!("builtin bound for {} was ambig", + obligation.repr(self.tcx()))[]); } } } @@ -1815,8 +1815,8 @@ fn confirm_fn_pointer_candidate(&mut self, _ => { self.tcx().sess.span_bug( obligation.cause.span, - format!("Fn pointer candidate for inappropriate self type: {}", - self_ty.repr(self.tcx())).index(&FullRange)); + &format!("Fn pointer candidate for inappropriate self type: {}", + self_ty.repr(self.tcx()))[]); } }; @@ -1944,9 +1944,9 @@ fn rematch_impl(&mut self, } Err(()) => { self.tcx().sess.bug( - format!("Impl {} was matchable against {} but now is not", + &format!("Impl {} was matchable against {} but now is not", impl_def_id.repr(self.tcx()), - obligation.repr(self.tcx())).index(&FullRange)); + obligation.repr(self.tcx()))[]); } } } diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 46d2ff7e1b9..113522cc9da 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -2036,8 +2036,8 @@ pub fn to_opt_poly_trait_ref(&self) -> Option> { /// struct Foo> { ... } /// /// Here, the `Generics` for `Foo` would contain a list of bounds like -/// `[.index(&FullRange), [U:Bar]]`. Now if there were some particular reference -/// like `Foo`, then the `GenericBounds` would be `[.index(&FullRange), +/// `[[], [U:Bar]]`. Now if there were some particular reference +/// like `Foo`, then the `GenericBounds` would be `[[], /// [uint:Bar]]`. 
#[derive(Clone, Show)] pub struct GenericBounds<'tcx> { @@ -2212,9 +2212,9 @@ pub fn for_item(cx: &'a ctxt<'tcx>, id: NodeId) -> ParameterEnvironment<'a, 'tcx ParameterEnvironment::for_item(cx, cx.map.get_parent(id)) } _ => { - cx.sess.bug(format!("ParameterEnvironment::from_item(): \ + cx.sess.bug(&format!("ParameterEnvironment::from_item(): \ `{}` is not an item", - cx.map.node_to_string(id)).index(&FullRange)) + cx.map.node_to_string(id))[]) } } } @@ -2299,7 +2299,7 @@ pub fn trait_did(&self, cx: &ctxt) -> ast::DefId { }; match result { Ok(trait_did) => trait_did, - Err(err) => cx.sess.fatal(err.index(&FullRange)), + Err(err) => cx.sess.fatal(&err[]), } } } @@ -2620,7 +2620,7 @@ fn add_sty(&mut self, st: &sty) { } &ty_tup(ref ts) => { - self.add_tys(ts.index(&FullRange)); + self.add_tys(&ts[]); } &ty_bare_fn(_, ref f) => { @@ -2643,7 +2643,7 @@ fn add_tys(&mut self, tys: &[Ty]) { fn add_fn_sig(&mut self, fn_sig: &PolyFnSig) { let mut computation = FlagComputation::new(); - computation.add_tys(fn_sig.0.inputs.index(&FullRange)); + computation.add_tys(&fn_sig.0.inputs[]); if let ty::FnConverging(output) = fn_sig.0.output { computation.add_ty(output); @@ -2812,7 +2812,7 @@ pub fn mk_trait<'tcx>(cx: &ctxt<'tcx>, fn bound_list_is_sorted(bounds: &[ty::PolyProjectionPredicate]) -> bool { bounds.len() == 0 || - bounds.index(&(1..)).iter().enumerate().all( + bounds[1..].iter().enumerate().all( |(index, bound)| bounds[index].sort_key() <= bound.sort_key()) } @@ -3066,8 +3066,8 @@ pub fn sequence_element_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { ty_vec(ty, _) => ty, ty_str => mk_mach_uint(cx, ast::TyU8), ty_open(ty) => sequence_element_type(cx, ty), - _ => cx.sess.bug(format!("sequence_element_type called on non-sequence value: {}", - ty_to_string(cx, ty)).index(&FullRange)), + _ => cx.sess.bug(&format!("sequence_element_type called on non-sequence value: {}", + ty_to_string(cx, ty))[]), } } @@ -3401,7 +3401,7 @@ fn tc_ty<'tcx>(cx: &ctxt<'tcx>, ty_struct(did, substs) => { let flds = struct_fields(cx, did, substs); let mut res = - TypeContents::union(flds.index(&FullRange), + TypeContents::union(&flds[], |f| tc_mt(cx, f.mt, cache)); if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) { @@ -3425,15 +3425,15 @@ fn tc_ty<'tcx>(cx: &ctxt<'tcx>, } ty_tup(ref tys) => { - TypeContents::union(tys.index(&FullRange), + TypeContents::union(&tys[], |ty| tc_ty(cx, *ty, cache)) } ty_enum(did, substs) => { let variants = substd_enum_variants(cx, did, substs); let mut res = - TypeContents::union(variants.index(&FullRange), |variant| { - TypeContents::union(variant.args.index(&FullRange), + TypeContents::union(&variants[], |variant| { + TypeContents::union(&variant.args[], |arg_ty| { tc_ty(cx, *arg_ty, cache) }) @@ -4017,8 +4017,8 @@ pub fn deref<'tcx>(ty: Ty<'tcx>, explicit: bool) -> Option> { pub fn close_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { match ty.sty { ty_open(ty) => mk_rptr(cx, cx.mk_region(ReStatic), mt {ty: ty, mutbl:ast::MutImmutable}), - _ => cx.sess.bug(format!("Trying to close a non-open type {}", - ty_to_string(cx, ty)).index(&FullRange)) + _ => cx.sess.bug(&format!("Trying to close a non-open type {}", + ty_to_string(cx, ty))[]) } } @@ -4118,8 +4118,8 @@ pub fn node_id_to_trait_ref<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) match cx.trait_refs.borrow().get(&id) { Some(ty) => ty.clone(), None => cx.sess.bug( - format!("node_id_to_trait_ref: no trait ref for node `{}`", - cx.map.node_to_string(id)).index(&FullRange)) + &format!("node_id_to_trait_ref: no trait ref 
for node `{}`", + cx.map.node_to_string(id))[]) } } @@ -4131,8 +4131,8 @@ pub fn node_id_to_type<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> Ty<'tcx> { match try_node_id_to_type(cx, id) { Some(ty) => ty, None => cx.sess.bug( - format!("node_id_to_type: no type for node `{}`", - cx.map.node_to_string(id)).index(&FullRange)) + &format!("node_id_to_type: no type for node `{}`", + cx.map.node_to_string(id))[]) } } @@ -4218,8 +4218,8 @@ pub fn ty_region(tcx: &ctxt, ref s => { tcx.sess.span_bug( span, - format!("ty_region() invoked on an inappropriate ty: {:?}", - s).index(&FullRange)); + &format!("ty_region() invoked on an inappropriate ty: {:?}", + s)[]); } } } @@ -4278,13 +4278,13 @@ pub fn expr_span(cx: &ctxt, id: NodeId) -> Span { e.span } Some(f) => { - cx.sess.bug(format!("Node id {} is not an expr: {:?}", + cx.sess.bug(&format!("Node id {} is not an expr: {:?}", id, - f).index(&FullRange)); + f)[]); } None => { - cx.sess.bug(format!("Node id {} is not present \ - in the node map", id).index(&FullRange)); + cx.sess.bug(&format!("Node id {} is not present \ + in the node map", id)[]); } } } @@ -4298,16 +4298,16 @@ pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString { } _ => { cx.sess.bug( - format!("Variable id {} maps to {:?}, not local", + &format!("Variable id {} maps to {:?}, not local", id, - pat).index(&FullRange)); + pat)[]); } } } r => { - cx.sess.bug(format!("Variable id {} maps to {:?}, not local", + cx.sess.bug(&format!("Variable id {} maps to {:?}, not local", id, - r).index(&FullRange)); + r)[]); } } } @@ -4336,9 +4336,9 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, } ref b => { cx.sess.bug( - format!("AdjustReifyFnPointer adjustment on non-fn-item: \ + &format!("AdjustReifyFnPointer adjustment on non-fn-item: \ {:?}", - b).index(&FullRange)); + b)[]); } } } @@ -4365,11 +4365,11 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, None => { cx.sess.span_bug( span, - format!("the {}th autoderef failed: \ + &format!("the {}th autoderef failed: \ {}", i, ty_to_string(cx, adjusted_ty)) - .index(&FullRange)); + []); } } } @@ -4431,8 +4431,8 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>, mk_vec(cx, ty, None) } _ => cx.sess.span_bug(span, - format!("UnsizeLength with bad sty: {:?}", - ty_to_string(cx, ty)).index(&FullRange)) + &format!("UnsizeLength with bad sty: {:?}", + ty_to_string(cx, ty))[]) }, &UnsizeStruct(box ref k, tp_index) => match ty.sty { ty_struct(did, substs) => { @@ -4443,8 +4443,8 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>, mk_struct(cx, did, cx.mk_substs(unsized_substs)) } _ => cx.sess.span_bug(span, - format!("UnsizeStruct with bad sty: {:?}", - ty_to_string(cx, ty)).index(&FullRange)) + &format!("UnsizeStruct with bad sty: {:?}", + ty_to_string(cx, ty))[]) }, &UnsizeVtable(TyTrait { ref principal, ref bounds }, _) => { mk_trait(cx, principal.clone(), bounds.clone()) @@ -4456,8 +4456,8 @@ pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> def::Def { match tcx.def_map.borrow().get(&expr.id) { Some(&def) => def, None => { - tcx.sess.span_bug(expr.span, format!( - "no def-map entry for expr {}", expr.id).index(&FullRange)); + tcx.sess.span_bug(expr.span, &format!( + "no def-map entry for expr {}", expr.id)[]); } } } @@ -4550,9 +4550,9 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind { def => { tcx.sess.span_bug( expr.span, - format!("uncategorized def for expr {}: {:?}", + &format!("uncategorized def for expr {}: {:?}", expr.id, - def).index(&FullRange)); + def)[]); } } } @@ -4672,12 +4672,12 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, 
fields: &[field]) -> uint { let mut i = 0u; for f in fields.iter() { if f.name == name { return i; } i += 1u; } - tcx.sess.bug(format!( + tcx.sess.bug(&format!( "no field named `{}` found in the list of fields `{:?}`", token::get_name(name), fields.iter() .map(|f| token::get_name(f.name).get().to_string()) - .collect::>()).index(&FullRange)); + .collect::>())[]); } pub fn impl_or_trait_item_idx(id: ast::Name, trait_items: &[ImplOrTraitItem]) @@ -4932,7 +4932,7 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) match item.node { ItemTrait(_, _, _, ref ms) => { let (_, p) = - ast_util::split_trait_methods(ms.index(&FullRange)); + ast_util::split_trait_methods(&ms[]); p.iter() .map(|m| { match impl_or_trait_item( @@ -4949,16 +4949,16 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) }).collect() } _ => { - cx.sess.bug(format!("provided_trait_methods: `{:?}` is \ + cx.sess.bug(&format!("provided_trait_methods: `{:?}` is \ not a trait", - id).index(&FullRange)) + id)[]) } } } _ => { - cx.sess.bug(format!("provided_trait_methods: `{:?}` is not a \ + cx.sess.bug(&format!("provided_trait_methods: `{:?}` is not a \ trait", - id).index(&FullRange)) + id)[]) } } } else { @@ -5196,7 +5196,7 @@ pub fn from_ast_variant(cx: &ctxt<'tcx>, }; }, ast::StructVariantKind(ref struct_def) => { - let fields: &[StructField] = struct_def.fields.index(&FullRange); + let fields: &[StructField] = &struct_def.fields[]; assert!(fields.len() > 0); @@ -5346,8 +5346,8 @@ pub fn enum_variants<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) Err(ref err) => { cx.sess .span_err(e.span, - format!("expected constant: {}", - *err).index(&FullRange)); + &format!("expected constant: {}", + *err)[]); } }, None => {} @@ -5636,8 +5636,8 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec { Some(fields) => (**fields).clone(), _ => { cx.sess.bug( - format!("ID not mapped to struct fields: {}", - cx.map.node_to_string(did.node)).index(&FullRange)); + &format!("ID not mapped to struct fields: {}", + cx.map.node_to_string(did.node))[]); } } } else { @@ -5670,7 +5670,7 @@ pub fn struct_fields<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId, substs: &Substs<'tc pub fn tup_fields<'tcx>(v: &[Ty<'tcx>]) -> Vec> { v.iter().enumerate().map(|(i, &f)| { field { - name: token::intern(i.to_string().index(&FullRange)), + name: token::intern(&i.to_string()[]), mt: mt { ty: f, mutbl: MutImmutable @@ -5845,9 +5845,9 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint { const_eval::const_binary(_) => "binary array" }; - tcx.sess.span_err(count_expr.span, format!( + tcx.sess.span_err(count_expr.span, &format!( "expected positive integer for repeat count, found {}", - found).index(&FullRange)); + found)[]); } Err(_) => { let found = match count_expr.node { @@ -5860,9 +5860,9 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint { _ => "non-constant expression" }; - tcx.sess.span_err(count_expr.span, format!( + tcx.sess.span_err(count_expr.span, &format!( "expected constant integer for repeat count, found {}", - found).index(&FullRange)); + found)[]); } } 0 @@ -6646,7 +6646,7 @@ pub fn with_freevars(tcx: &ty::ctxt, fid: ast::NodeId, f: F) -> T where { match tcx.freevars.borrow().get(&fid) { None => f(&[]), - Some(d) => f(d.index(&FullRange)) + Some(d) => f(&d[]) } } diff --git a/src/librustc/plugin/load.rs b/src/librustc/plugin/load.rs index a38298d52dd..87f5ba0246f 100644 --- a/src/librustc/plugin/load.rs +++ b/src/librustc/plugin/load.rs @@ -223,17 +223,17 @@ fn 
dylink_registrar(&mut self, // this is fatal: there are almost certainly macros we need // inside this crate, so continue would spew "macro undefined" // errors - Err(err) => self.sess.span_fatal(vi.span, err.index(&FullRange)) + Err(err) => self.sess.span_fatal(vi.span, &err[]) }; unsafe { let registrar = - match lib.symbol(symbol.index(&FullRange)) { + match lib.symbol(&symbol[]) { Ok(registrar) => { mem::transmute::<*mut u8,PluginRegistrarFun>(registrar) } // again fatal if we can't register macros - Err(err) => self.sess.span_fatal(vi.span, err.index(&FullRange)) + Err(err) => self.sess.span_fatal(vi.span, &err[]) }; // Intentionally leak the dynamic library. We can't ever unload it diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 4968066f7b6..a4e72bc0a8a 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -558,18 +558,18 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions if !setter(&mut cg, value) { match (value, opt_type_desc) { (Some(..), None) => { - early_error(format!("codegen option `{}` takes no \ - value", key).index(&FullRange)) + early_error(&format!("codegen option `{}` takes no \ + value", key)[]) } (None, Some(type_desc)) => { - early_error(format!("codegen option `{0}` requires \ + early_error(&format!("codegen option `{0}` requires \ {1} (-C {0}=)", - key, type_desc).index(&FullRange)) + key, type_desc)[]) } (Some(value), Some(type_desc)) => { - early_error(format!("incorrect value `{}` for codegen \ + early_error(&format!("incorrect value `{}` for codegen \ option `{}` - {} was expected", - value, key, type_desc).index(&FullRange)) + value, key, type_desc)[]) } (None, None) => unreachable!() } @@ -578,8 +578,8 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions break; } if !found { - early_error(format!("unknown codegen option: `{}`", - key).index(&FullRange)); + early_error(&format!("unknown codegen option: `{}`", + key)[]); } } return cg; @@ -592,10 +592,10 @@ pub fn default_lib_output() -> CrateType { pub fn default_configuration(sess: &Session) -> ast::CrateConfig { use syntax::parse::token::intern_and_get_ident as intern; - let end = sess.target.target.target_endian.index(&FullRange); - let arch = sess.target.target.arch.index(&FullRange); - let wordsz = sess.target.target.target_word_size.index(&FullRange); - let os = sess.target.target.target_os.index(&FullRange); + let end = &sess.target.target.target_endian[]; + let arch = &sess.target.target.arch[]; + let wordsz = &sess.target.target.target_word_size[]; + let os = &sess.target.target.target_os[]; let fam = match sess.target.target.options.is_like_windows { true => InternedString::new("windows"), @@ -631,23 +631,23 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig { append_configuration(&mut user_cfg, InternedString::new("test")) } let mut v = user_cfg.into_iter().collect::>(); - v.push_all(default_cfg.index(&FullRange)); + v.push_all(&default_cfg[]); v } pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config { - let target = match Target::search(opts.target_triple.index(&FullRange)) { + let target = match Target::search(&opts.target_triple[]) { Ok(t) => t, Err(e) => { sp.handler().fatal((format!("Error loading target specification: {}", e)).as_slice()); } }; - let (int_type, uint_type) = match target.target_word_size.index(&FullRange) { + let (int_type, uint_type) = match &target.target_word_size[] { "32" => (ast::TyI32, ast::TyU32), "64" => (ast::TyI64, 
ast::TyU64), - w => sp.handler().fatal((format!("target specification was invalid: unrecognized \ - target-word-size {}", w)).index(&FullRange)) + w => sp.handler().fatal(&format!("target specification was invalid: unrecognized \ + target-word-size {}", w)[]) }; Config { @@ -845,7 +845,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let unparsed_crate_types = matches.opt_strs("crate-type"); let crate_types = parse_crate_types_from_list(unparsed_crate_types) - .unwrap_or_else(|e| early_error(e.index(&FullRange))); + .unwrap_or_else(|e| early_error(&e[])); let mut lint_opts = vec!(); let mut describe_lints = false; @@ -872,8 +872,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { } } if this_bit == 0 { - early_error(format!("unknown debug flag: {}", - *debug_flag).index(&FullRange)) + early_error(&format!("unknown debug flag: {}", + *debug_flag)[]) } debugging_opts |= this_bit; } @@ -917,8 +917,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { "link" => OutputTypeExe, "dep-info" => OutputTypeDepInfo, _ => { - early_error(format!("unknown emission type: `{}`", - part).index(&FullRange)) + early_error(&format!("unknown emission type: `{}`", + part)[]) } }; output_types.push(output_type) @@ -955,9 +955,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some("2") => Default, Some("3") => Aggressive, Some(arg) => { - early_error(format!("optimization level needs to be \ + early_error(&format!("optimization level needs to be \ between 0-3 (instead was `{}`)", - arg).index(&FullRange)); + arg)[]); } } } else { @@ -993,9 +993,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { None | Some("2") => FullDebugInfo, Some(arg) => { - early_error(format!("debug info level needs to be between \ + early_error(&format!("debug info level needs to be between \ 0-2 (instead was `{}`)", - arg).index(&FullRange)); + arg)[]); } } } else { @@ -1013,7 +1013,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let mut search_paths = SearchPaths::new(); for s in matches.opt_strs("L").iter() { - search_paths.add_path(s.index(&FullRange)); + search_paths.add_path(&s[]); } let libs = matches.opt_strs("l").into_iter().map(|s| { @@ -1043,9 +1043,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { (Some(name), "framework") => (name, cstore::NativeFramework), (Some(name), "static") => (name, cstore::NativeStatic), (_, s) => { - early_error(format!("unknown library kind `{}`, expected \ + early_error(&format!("unknown library kind `{}`, expected \ one of dylib, framework, or static", - s).index(&FullRange)); + s)[]); } }; (name.to_string(), kind) @@ -1089,7 +1089,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { --debuginfo"); } - let color = match matches.opt_str("color").as_ref().map(|s| s.index(&FullRange)) { + let color = match matches.opt_str("color").as_ref().map(|s| &s[]) { Some("auto") => Auto, Some("always") => Always, Some("never") => Never, @@ -1097,9 +1097,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { None => Auto, Some(arg) => { - early_error(format!("argument for --color must be auto, always \ + early_error(&format!("argument for --color must be auto, always \ or never (instead was `{}`)", - arg).index(&FullRange)) + arg)[]) } }; @@ -1201,7 +1201,7 @@ mod test { #[test] fn test_switch_implies_cfg_test() { let matches = - &match getopts(&["--test".to_string()], optgroups().index(&FullRange)) { + 
&match getopts(&["--test".to_string()], &optgroups()[]) { Ok(m) => m, Err(f) => panic!("test_switch_implies_cfg_test: {}", f) }; @@ -1209,7 +1209,7 @@ fn test_switch_implies_cfg_test() { let sessopts = build_session_options(matches); let sess = build_session(sessopts, None, registry); let cfg = build_configuration(&sess); - assert!((attr::contains_name(cfg.index(&FullRange), "test"))); + assert!((attr::contains_name(&cfg[], "test"))); } // When the user supplies --test and --cfg test, don't implicitly add @@ -1218,7 +1218,7 @@ fn test_switch_implies_cfg_test() { fn test_switch_implies_cfg_test_unless_cfg_test() { let matches = &match getopts(&["--test".to_string(), "--cfg=test".to_string()], - optgroups().index(&FullRange)) { + &optgroups()[]) { Ok(m) => m, Err(f) => { panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f) @@ -1238,7 +1238,7 @@ fn test_can_print_warnings() { { let matches = getopts(&[ "-Awarnings".to_string() - ], optgroups().index(&FullRange)).unwrap(); + ], &optgroups()[]).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); @@ -1249,7 +1249,7 @@ fn test_can_print_warnings() { let matches = getopts(&[ "-Awarnings".to_string(), "-Dwarnings".to_string() - ], optgroups().index(&FullRange)).unwrap(); + ], &optgroups()[]).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); @@ -1259,7 +1259,7 @@ fn test_can_print_warnings() { { let matches = getopts(&[ "-Adead_code".to_string() - ], optgroups().index(&FullRange)).unwrap(); + ], &optgroups()[]).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 94a6bca4e06..65dac1a5fac 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -174,7 +174,7 @@ pub fn codemap<'a>(&'a self) -> &'a codemap::CodeMap { // cases later on pub fn impossible_case(&self, sp: Span, msg: &str) -> ! 
{ self.span_bug(sp, - format!("impossible case reached: {}", msg).index(&FullRange)); + &format!("impossible case reached: {}", msg)[]); } pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) } pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) } @@ -216,7 +216,7 @@ pub fn sysroot<'a>(&'a self) -> &'a Path { } pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch { filesearch::FileSearch::new(self.sysroot(), - self.opts.target_triple.index(&FullRange), + &self.opts.target_triple[], &self.opts.search_paths, kind) } diff --git a/src/librustc/util/lev_distance.rs b/src/librustc/util/lev_distance.rs index 8f5820d92c5..ec840498ae6 100644 --- a/src/librustc/util/lev_distance.rs +++ b/src/librustc/util/lev_distance.rs @@ -48,7 +48,7 @@ fn test_lev_distance() { for c in range(0u32, MAX as u32) .filter_map(|i| from_u32(i)) .map(|i| i.to_string()) { - assert_eq!(lev_distance(c.index(&FullRange), c.index(&FullRange)), 0); + assert_eq!(lev_distance(&c[], &c[]), 0); } let a = "\nMäry häd ä little lämb\n\nLittle lämb\n"; diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 2d433369366..4807e264708 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -55,12 +55,12 @@ pub fn note_and_explain_region(cx: &ctxt, (ref str, Some(span)) => { cx.sess.span_note( span, - format!("{}{}{}", prefix, *str, suffix).index(&FullRange)); + &format!("{}{}{}", prefix, *str, suffix)[]); Some(span) } (ref str, None) => { cx.sess.note( - format!("{}{}{}", prefix, *str, suffix).index(&FullRange)); + &format!("{}{}{}", prefix, *str, suffix)[]); None } } @@ -271,7 +271,7 @@ fn bare_fn_to_string<'tcx>(cx: &ctxt<'tcx>, }; if abi != abi::Rust { - s.push_str(format!("extern {} ", abi.to_string()).index(&FullRange)); + s.push_str(&format!("extern {} ", abi.to_string())[]); }; s.push_str("fn"); @@ -290,7 +290,7 @@ fn bare_fn_to_string<'tcx>(cx: &ctxt<'tcx>, Some(def_id) => { s.push_str(" {"); let path_str = ty::item_path_str(cx, def_id); - s.push_str(path_str.index(&FullRange)); + s.push_str(&path_str[]); s.push_str("}"); } None => { } @@ -305,7 +305,7 @@ fn closure_to_string<'tcx>(cx: &ctxt<'tcx>, cty: &ty::ClosureTy<'tcx>) -> String match cty.store { ty::UniqTraitStore => {} ty::RegionTraitStore(region, _) => { - s.push_str(region_to_string(cx, "", true, region).index(&FullRange)); + s.push_str(®ion_to_string(cx, "", true, region)[]); } } @@ -324,7 +324,7 @@ fn closure_to_string<'tcx>(cx: &ctxt<'tcx>, cty: &ty::ClosureTy<'tcx>) -> String assert_eq!(cty.onceness, ast::Once); s.push_str("proc"); push_sig_to_string(cx, &mut s, '(', ')', &cty.sig, - bounds_str.index(&FullRange)); + &bounds_str[]); } ty::RegionTraitStore(..) 
=> { match cty.onceness { @@ -332,7 +332,7 @@ fn closure_to_string<'tcx>(cx: &ctxt<'tcx>, cty: &ty::ClosureTy<'tcx>) -> String ast::Once => s.push_str("once ") } push_sig_to_string(cx, &mut s, '|', '|', &cty.sig, - bounds_str.index(&FullRange)); + &bounds_str[]); } } @@ -365,7 +365,7 @@ fn push_sig_to_string<'tcx>(cx: &ctxt<'tcx>, ty::FnConverging(t) => { if !ty::type_is_nil(t) { s.push_str(" -> "); - s.push_str(ty_to_string(cx, t).index(&FullRange)); + s.push_str(&ty_to_string(cx, t)[]); } } ty::FnDiverging => { @@ -402,7 +402,7 @@ fn infer_ty_to_string(cx: &ctxt, ty: ty::InferTy) -> String { } ty_rptr(r, ref tm) => { let mut buf = region_ptr_to_string(cx, *r); - buf.push_str(mt_to_string(cx, tm).index(&FullRange)); + buf.push_str(&mt_to_string(cx, tm)[]); buf } ty_open(typ) => @@ -412,7 +412,7 @@ fn infer_ty_to_string(cx: &ctxt, ty: ty::InferTy) -> String { .iter() .map(|elem| ty_to_string(cx, *elem)) .collect::>(); - match strs.index(&FullRange) { + match &strs[] { [ref string] => format!("({},)", string), strs => format!("({})", strs.connect(", ")) } @@ -541,7 +541,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, 0 }; - for t in tps.index(&(0..(tps.len() - num_defaults))).iter() { + for t in tps[0..(tps.len() - num_defaults)].iter() { strs.push(ty_to_string(cx, *t)) } @@ -549,11 +549,11 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, format!("{}({}){}", base, if strs[0].starts_with("(") && strs[0].ends_with(",)") { - strs[0].index(&(1 .. (strs[0].len() - 2))) // Remove '(' and ',)' + &strs[0][1 .. (strs[0].len() - 2)] // Remove '(' and ',)' } else if strs[0].starts_with("(") && strs[0].ends_with(")") { - strs[0].index(&(1 .. (strs[0].len() - 1))) // Remove '(' and ')' + &strs[0][1 .. (strs[0].len() - 1)] // Remove '(' and ')' } else { - strs[0].index(&FullRange) + &strs[0][] }, if &*strs[1] == "()" { String::new() } else { format!(" -> {}", strs[1]) }) } else if strs.len() > 0 { @@ -566,7 +566,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String { let mut s = typ.repr(cx).to_string(); if s.len() >= 32u { - s = s.index(&(0u..32u)).to_string(); + s = (&s[0u..32u]).to_string(); } return s; } @@ -631,7 +631,7 @@ fn repr(&self, tcx: &ctxt<'tcx>) -> String { impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - repr_vec(tcx, self.index(&FullRange)) + repr_vec(tcx, &self[]) } } @@ -639,7 +639,7 @@ fn repr(&self, tcx: &ctxt<'tcx>) -> String { // autoderef cannot convert the &[T] handler impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - repr_vec(tcx, self.index(&FullRange)) + repr_vec(tcx, &self[]) } } diff --git a/src/librustc/util/snapshot_vec.rs b/src/librustc/util/snapshot_vec.rs index d68b13aa2ff..8fc95529bc0 100644 --- a/src/librustc/util/snapshot_vec.rs +++ b/src/librustc/util/snapshot_vec.rs @@ -116,7 +116,7 @@ pub fn start_snapshot(&mut self) -> Snapshot { pub fn actions_since_snapshot(&self, snapshot: &Snapshot) -> &[UndoLog] { - self.undo_log.index(&(snapshot.length..)) + &self.undo_log[snapshot.length..] 
} fn assert_open_snapshot(&self, snapshot: &Snapshot) { diff --git a/src/librustc_back/archive.rs b/src/librustc_back/archive.rs index 48004acaac0..7ea192b8d6b 100644 --- a/src/librustc_back/archive.rs +++ b/src/librustc_back/archive.rs @@ -53,7 +53,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, args: &str, cwd: Option<&Path>, paths: &[&Path]) -> ProcessOutput { let ar = match *maybe_ar_prog { - Some(ref ar) => ar.index(&FullRange), + Some(ref ar) => &ar[], None => "ar" }; let mut cmd = Command::new(ar); @@ -73,24 +73,21 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, Ok(prog) => { let o = prog.wait_with_output().unwrap(); if !o.status.success() { - handler.err(format!("{} failed with: {}", + handler.err(&format!("{} failed with: {}", cmd, - o.status).index(&FullRange)); - handler.note(format!("stdout ---\n{}", - str::from_utf8(o.output - .index(&FullRange)).unwrap()) - .index(&FullRange)); - handler.note(format!("stderr ---\n{}", - str::from_utf8(o.error - .index(&FullRange)).unwrap()) - .index(&FullRange)); + o.status)[]); + handler.note(&format!("stdout ---\n{}", + str::from_utf8(&o.output[]).unwrap())[]); + handler.note(&format!("stderr ---\n{}", + str::from_utf8(&o.error[]).unwrap()) + []); handler.abort_if_errors(); } o }, Err(e) => { - handler.err(format!("could not exec `{}`: {}", ar.index(&FullRange), - e).index(&FullRange)); + handler.err(&format!("could not exec `{}`: {}", &ar[], + e)[]); handler.abort_if_errors(); panic!("rustc::back::archive::run_ar() should not reach this point"); } @@ -106,16 +103,16 @@ pub fn find_library(name: &str, osprefix: &str, ossuffix: &str, for path in search_paths.iter() { debug!("looking for {} inside {:?}", name, path.display()); - let test = path.join(oslibname.index(&FullRange)); + let test = path.join(&oslibname[]); if test.exists() { return test } if oslibname != unixlibname { - let test = path.join(unixlibname.index(&FullRange)); + let test = path.join(&unixlibname[]); if test.exists() { return test } } } - handler.fatal(format!("could not find native static library `{}`, \ + handler.fatal(&format!("could not find native static library `{}`, \ perhaps an -L flag is missing?", - name).index(&FullRange)); + name)[]); } impl<'a> Archive<'a> { @@ -147,7 +144,7 @@ pub fn remove_file(&mut self, file: &str) { /// Lists all files in an archive pub fn files(&self) -> Vec { let output = run_ar(self.handler, &self.maybe_ar_prog, "t", None, &[&self.dst]); - let output = str::from_utf8(output.output.index(&FullRange)).unwrap(); + let output = str::from_utf8(&output.output[]).unwrap(); // use lines_any because windows delimits output with `\r\n` instead of // just `\n` output.lines_any().map(|s| s.to_string()).collect() @@ -179,9 +176,9 @@ pub fn create(config: ArchiveConfig<'a>) -> ArchiveBuilder<'a> { /// search in the relevant locations for a library named `name`. 
pub fn add_native_library(&mut self, name: &str) -> io::IoResult<()> { let location = find_library(name, - self.archive.slib_prefix.index(&FullRange), - self.archive.slib_suffix.index(&FullRange), - self.archive.lib_search_paths.index(&FullRange), + &self.archive.slib_prefix[], + &self.archive.slib_suffix[], + &self.archive.lib_search_paths[], self.archive.handler); self.add_archive(&location, name, |_| false) } @@ -197,12 +194,12 @@ pub fn add_rlib(&mut self, rlib: &Path, name: &str, // as simple comparison is not enough - there // might be also an extra name suffix let obj_start = format!("{}", name); - let obj_start = obj_start.index(&FullRange); + let obj_start = &obj_start[]; // Ignoring all bytecode files, no matter of // name let bc_ext = ".bytecode.deflate"; - self.add_archive(rlib, name.index(&FullRange), |fname: &str| { + self.add_archive(rlib, &name[], |fname: &str| { let skip_obj = lto && fname.starts_with(obj_start) && fname.ends_with(".o"); skip_obj || fname.ends_with(bc_ext) || fname == METADATA_FILENAME @@ -239,7 +236,7 @@ pub fn build(self) -> Archive<'a> { // allow running `ar s file.a` to update symbols only. if self.should_update_symbols { run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - "s", Some(self.work_dir.path()), args.index(&FullRange)); + "s", Some(self.work_dir.path()), &args[]); } return self.archive; } @@ -259,7 +256,7 @@ pub fn build(self) -> Archive<'a> { // Add the archive members seen so far, without updating the // symbol table (`S`). run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - "cruS", Some(self.work_dir.path()), args.index(&FullRange)); + "cruS", Some(self.work_dir.path()), &args[]); args.clear(); args.push(&abs_dst); @@ -274,7 +271,7 @@ pub fn build(self) -> Archive<'a> { // necessary. 
let flags = if self.should_update_symbols { "crus" } else { "cruS" }; run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - flags, Some(self.work_dir.path()), args.index(&FullRange)); + flags, Some(self.work_dir.path()), &args[]); self.archive } @@ -316,7 +313,7 @@ fn add_archive(&mut self, archive: &Path, name: &str, mut skip: F) -> io::IoR } else { filename }; - let new_filename = self.work_dir.path().join(filename.index(&FullRange)); + let new_filename = self.work_dir.path().join(&filename[]); try!(fs::rename(file, &new_filename)); self.members.push(Path::new(filename)); } diff --git a/src/librustc_back/rpath.rs b/src/librustc_back/rpath.rs index db1dfa6b6ee..d24fd6a5b3f 100644 --- a/src/librustc_back/rpath.rs +++ b/src/librustc_back/rpath.rs @@ -44,15 +44,15 @@ pub fn get_rpath_flags(config: RPathConfig) -> Vec where l.map(|p| p.clone()) }).collect::>(); - let rpaths = get_rpaths(config, libs.index(&FullRange)); - flags.push_all(rpaths_to_flags(rpaths.index(&FullRange)).index(&FullRange)); + let rpaths = get_rpaths(config, &libs[]); + flags.push_all(&rpaths_to_flags(&rpaths[])[]); flags } fn rpaths_to_flags(rpaths: &[String]) -> Vec { let mut ret = Vec::new(); for rpath in rpaths.iter() { - ret.push(format!("-Wl,-rpath,{}", (*rpath).index(&FullRange))); + ret.push(format!("-Wl,-rpath,{}", &(*rpath)[])); } return ret; } @@ -82,14 +82,14 @@ fn log_rpaths(desc: &str, rpaths: &[String]) { } } - log_rpaths("relative", rel_rpaths.index(&FullRange)); - log_rpaths("fallback", fallback_rpaths.index(&FullRange)); + log_rpaths("relative", &rel_rpaths[]); + log_rpaths("fallback", &fallback_rpaths[]); let mut rpaths = rel_rpaths; - rpaths.push_all(fallback_rpaths.index(&FullRange)); + rpaths.push_all(&fallback_rpaths[]); // Remove duplicates - let rpaths = minimize_rpaths(rpaths.index(&FullRange)); + let rpaths = minimize_rpaths(&rpaths[]); return rpaths; } @@ -140,7 +140,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec { let mut set = HashSet::new(); let mut minimized = Vec::new(); for rpath in rpaths.iter() { - if set.insert(rpath.index(&FullRange)) { + if set.insert(&rpath[]) { minimized.push(rpath.clone()); } } diff --git a/src/librustc_back/sha2.rs b/src/librustc_back/sha2.rs index f33971a6ac0..ac5662f534c 100644 --- a/src/librustc_back/sha2.rs +++ b/src/librustc_back/sha2.rs @@ -140,7 +140,7 @@ fn input(&mut self, input: &[u8], mut func: F) where if input.len() >= buffer_remaining { copy_memory( self.buffer.slice_mut(self.buffer_idx, size), - input.index(&(0..buffer_remaining))); + &input[0..buffer_remaining]); self.buffer_idx = 0; func(&self.buffer); i += buffer_remaining; @@ -156,7 +156,7 @@ fn input(&mut self, input: &[u8], mut func: F) where // While we have at least a full buffer size chunk's worth of data, process that data // without copying it into the buffer while input.len() - i >= size { - func(input.index(&(i..(i + size)))); + func(&input[i..(i + size)]); i += size; } @@ -166,7 +166,7 @@ fn input(&mut self, input: &[u8], mut func: F) where let input_remaining = input.len() - i; copy_memory( self.buffer.slice_to_mut(input_remaining), - input.index(&(i..))); + &input[i..]); self.buffer_idx += input_remaining; } @@ -188,7 +188,7 @@ fn next<'s>(&'s mut self, len: uint) -> &'s mut [u8] { fn full_buffer<'s>(&'s mut self) -> &'s [u8] { assert!(self.buffer_idx == 64); self.buffer_idx = 0; - return self.buffer.index(&(0..64)); + return &self.buffer[0..64]; } fn position(&self) -> uint { self.buffer_idx } diff --git a/src/librustc_back/svh.rs b/src/librustc_back/svh.rs index 
863c1a7c865..426946bb727 100644 --- a/src/librustc_back/svh.rs +++ b/src/librustc_back/svh.rs @@ -65,7 +65,7 @@ pub fn new(hash: &str) -> Svh { } pub fn as_str<'a>(&'a self) -> &'a str { - self.hash.index(&FullRange) + &self.hash[] } pub fn calculate(metadata: &Vec, krate: &ast::Crate) -> Svh { @@ -366,7 +366,7 @@ fn visit_mac(&mut self, mac: &Mac) { fn macro_name(mac: &Mac) -> token::InternedString { match &mac.node { &MacInvocTT(ref path, ref _tts, ref _stx_ctxt) => { - let s = path.segments.index(&FullRange); + let s = &path.segments[]; assert_eq!(s.len(), 1); content(s[0].identifier) } diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index 23c8fc7de51..5fe8a9d250c 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -224,8 +224,7 @@ pub fn from_json(obj: Json) -> Target { .and_then(|os| os.map(|s| s.to_string())) { Some(val) => val, None => - handler.fatal((format!("Field {} in target specification is required", name)) - .index(&FullRange)) + handler.fatal(&format!("Field {} in target specification is required", name)[]) } }; @@ -242,18 +241,18 @@ pub fn from_json(obj: Json) -> Target { macro_rules! key { ($key_name:ident) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(name.index(&FullRange)).map(|o| o.as_string() + obj.find(&name[]).map(|o| o.as_string() .map(|s| base.options.$key_name = s.to_string())); } ); ($key_name:ident, bool) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(name.index(&FullRange)) + obj.find(&name[]) .map(|o| o.as_boolean() .map(|s| base.options.$key_name = s)); } ); ($key_name:ident, list) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(name.index(&FullRange)).map(|o| o.as_array() + obj.find(&name[]).map(|o| o.as_array() .map(|v| base.options.$key_name = v.iter() .map(|a| a.as_string().unwrap().to_string()).collect() ) @@ -369,7 +368,7 @@ macro_rules! load_specific { let target_path = os::getenv("RUST_TARGET_PATH").unwrap_or(String::new()); - let paths = os::split_paths(target_path.index(&FullRange)); + let paths = os::split_paths(&target_path[]); // FIXME 16351: add a sane default search path? 
for dir in paths.iter() { diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index d942581ca62..d5ad201eabf 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -463,38 +463,38 @@ pub fn report_error_if_loan_conflicts_with_restriction(&self, (ty::MutBorrow, ty::MutBorrow) => { self.bccx.span_err( new_loan.span, - format!("cannot borrow `{}`{} as mutable \ + &format!("cannot borrow `{}`{} as mutable \ more than once at a time", - nl, new_loan_msg).index(&FullRange)) + nl, new_loan_msg)[]) } (ty::UniqueImmBorrow, _) => { self.bccx.span_err( new_loan.span, - format!("closure requires unique access to `{}` \ + &format!("closure requires unique access to `{}` \ but {} is already borrowed{}", - nl, ol_pronoun, old_loan_msg).index(&FullRange)); + nl, ol_pronoun, old_loan_msg)[]); } (_, ty::UniqueImmBorrow) => { self.bccx.span_err( new_loan.span, - format!("cannot borrow `{}`{} as {} because \ + &format!("cannot borrow `{}`{} as {} because \ previous closure requires unique access", - nl, new_loan_msg, new_loan.kind.to_user_str()).index(&FullRange)); + nl, new_loan_msg, new_loan.kind.to_user_str())[]); } (_, _) => { self.bccx.span_err( new_loan.span, - format!("cannot borrow `{}`{} as {} because \ + &format!("cannot borrow `{}`{} as {} because \ {} is also borrowed as {}{}", nl, new_loan_msg, new_loan.kind.to_user_str(), ol_pronoun, old_loan.kind.to_user_str(), - old_loan_msg).index(&FullRange)); + old_loan_msg)[]); } } @@ -502,8 +502,8 @@ pub fn report_error_if_loan_conflicts_with_restriction(&self, euv::ClosureCapture(span) => { self.bccx.span_note( span, - format!("borrow occurs due to use of `{}` in closure", - nl).index(&FullRange)); + &format!("borrow occurs due to use of `{}` in closure", + nl)[]); } _ => { } } @@ -552,7 +552,7 @@ pub fn report_error_if_loan_conflicts_with_restriction(&self, self.bccx.span_note( old_loan.span, - format!("{}; {}", borrow_summary, rule_summary).index(&FullRange)); + &format!("{}; {}", borrow_summary, rule_summary)[]); let old_loan_span = self.tcx().map.span(old_loan.kill_scope.node_id()); self.bccx.span_end_note(old_loan_span, @@ -621,14 +621,14 @@ fn check_for_copy_of_frozen_path(&self, UseWhileBorrowed(loan_path, loan_span) => { self.bccx.span_err( span, - format!("cannot use `{}` because it was mutably borrowed", - self.bccx.loan_path_to_string(copy_path).index(&FullRange)) - .index(&FullRange)); + &format!("cannot use `{}` because it was mutably borrowed", + &self.bccx.loan_path_to_string(copy_path)[]) + []); self.bccx.span_note( loan_span, - format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(&*loan_path).index(&FullRange)) - .index(&FullRange)); + &format!("borrow of `{}` occurs here", + &self.bccx.loan_path_to_string(&*loan_path)[]) + []); } } } @@ -647,20 +647,20 @@ fn check_for_move_of_borrowed_path(&self, let err_message = match move_kind { move_data::Captured => format!("cannot move `{}` into closure because it is borrowed", - self.bccx.loan_path_to_string(move_path).index(&FullRange)), + &self.bccx.loan_path_to_string(move_path)[]), move_data::Declared | move_data::MoveExpr | move_data::MovePat => format!("cannot move out of `{}` because it is borrowed", - self.bccx.loan_path_to_string(move_path).index(&FullRange)) + &self.bccx.loan_path_to_string(move_path)[]) }; - self.bccx.span_err(span, err_message.index(&FullRange)); + self.bccx.span_err(span, &err_message[]); self.bccx.span_note( loan_span, - format!("borrow 
of `{}` occurs here", - self.bccx.loan_path_to_string(&*loan_path).index(&FullRange)) - .index(&FullRange)); + &format!("borrow of `{}` occurs here", + &self.bccx.loan_path_to_string(&*loan_path)[]) + []); } } } @@ -809,34 +809,34 @@ fn check_assignment(&self, if kind == ty::FnUnboxedClosureKind { self.bccx.span_err( assignment_span, - format!("cannot assign to {}", - self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange)); + &format!("cannot assign to {}", + self.bccx.cmt_to_string(&*assignee_cmt))[]); self.bccx.span_help( self.tcx().map.span(upvar_id.closure_expr_id), "consider changing this closure to take self by mutable reference"); } else { self.bccx.span_err( assignment_span, - format!("cannot assign to {} {}", + &format!("cannot assign to {} {}", assignee_cmt.mutbl.to_user_str(), - self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange)); + self.bccx.cmt_to_string(&*assignee_cmt))[]); } } _ => match opt_loan_path(&assignee_cmt) { Some(lp) => { self.bccx.span_err( assignment_span, - format!("cannot assign to {} {} `{}`", + &format!("cannot assign to {} {} `{}`", assignee_cmt.mutbl.to_user_str(), self.bccx.cmt_to_string(&*assignee_cmt), - self.bccx.loan_path_to_string(&*lp)).index(&FullRange)); + self.bccx.loan_path_to_string(&*lp))[]); } None => { self.bccx.span_err( assignment_span, - format!("cannot assign to {} {}", + &format!("cannot assign to {} {}", assignee_cmt.mutbl.to_user_str(), - self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange)); + self.bccx.cmt_to_string(&*assignee_cmt))[]); } } } @@ -955,11 +955,11 @@ pub fn report_illegal_mutation(&self, loan: &Loan) { self.bccx.span_err( span, - format!("cannot assign to `{}` because it is borrowed", - self.bccx.loan_path_to_string(loan_path)).index(&FullRange)); + &format!("cannot assign to `{}` because it is borrowed", + self.bccx.loan_path_to_string(loan_path))[]); self.bccx.span_note( loan.span, - format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(loan_path)).index(&FullRange)); + &format!("borrow of `{}` occurs here", + self.bccx.loan_path_to_string(loan_path))[]); } } diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs index d7527487465..1b120208217 100644 --- a/src/librustc_borrowck/borrowck/fragments.rs +++ b/src/librustc_borrowck/borrowck/fragments.rs @@ -38,7 +38,7 @@ enum Fragment { // This represents the collection of all but one of the elements // from an array at the path described by the move path index. // Note that attached MovePathIndex should have mem_categorization - // of InteriorElement (i.e. array dereference `.index(&FullRange)`). + // of InteriorElement (i.e. array dereference `&foo[]`). 
AllButOneFrom(MovePathIndex), } @@ -123,12 +123,12 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, let attrs : &[ast::Attribute]; attrs = match tcx.map.find(id) { Some(ast_map::NodeItem(ref item)) => - item.attrs.index(&FullRange), + &item.attrs[], Some(ast_map::NodeImplItem(&ast::MethodImplItem(ref m))) => - m.attrs.index(&FullRange), + &m.attrs[], Some(ast_map::NodeTraitItem(&ast::ProvidedMethod(ref m))) => - m.attrs.index(&FullRange), - _ => [].index(&FullRange), + &m.attrs[], + _ => &[][], }; let span_err = @@ -144,7 +144,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, for (i, mpi) in vec_rc.iter().enumerate() { let render = |&:| this.path_loan_path(*mpi).user_string(tcx); if span_err { - tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).index(&FullRange)); + tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render())[]); } if print { println!("id:{} {}[{}] `{}`", id, kind, i, render()); @@ -156,7 +156,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, for (i, f) in vec_rc.iter().enumerate() { let render = |&:| f.loan_path_user_string(this, tcx); if span_err { - tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).index(&FullRange)); + tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render())[]); } if print { println!("id:{} {}[{}] `{}`", id, kind, i, render()); @@ -198,11 +198,11 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { // First, filter out duplicates moved.sort(); moved.dedup(); - debug!("fragments 1 moved: {:?}", path_lps(moved.index(&FullRange))); + debug!("fragments 1 moved: {:?}", path_lps(&moved[])); assigned.sort(); assigned.dedup(); - debug!("fragments 1 assigned: {:?}", path_lps(assigned.index(&FullRange))); + debug!("fragments 1 assigned: {:?}", path_lps(&assigned[])); // Second, build parents from the moved and assigned. for m in moved.iter() { @@ -222,14 +222,14 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { parents.sort(); parents.dedup(); - debug!("fragments 2 parents: {:?}", path_lps(parents.index(&FullRange))); + debug!("fragments 2 parents: {:?}", path_lps(&parents[])); // Third, filter the moved and assigned fragments down to just the non-parents - moved.retain(|f| non_member(*f, parents.index(&FullRange))); - debug!("fragments 3 moved: {:?}", path_lps(moved.index(&FullRange))); + moved.retain(|f| non_member(*f, &parents[])); + debug!("fragments 3 moved: {:?}", path_lps(&moved[])); - assigned.retain(|f| non_member(*f, parents.index(&FullRange))); - debug!("fragments 3 assigned: {:?}", path_lps(assigned.index(&FullRange))); + assigned.retain(|f| non_member(*f, &parents[])); + debug!("fragments 3 assigned: {:?}", path_lps(&assigned[])); // Fourth, build the leftover from the moved, assigned, and parents. for m in moved.iter() { @@ -247,16 +247,16 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { unmoved.sort(); unmoved.dedup(); - debug!("fragments 4 unmoved: {:?}", frag_lps(unmoved.index(&FullRange))); + debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[])); // Fifth, filter the leftover fragments down to its core. 
unmoved.retain(|f| match *f { AllButOneFrom(_) => true, - Just(mpi) => non_member(mpi, parents.index(&FullRange)) && - non_member(mpi, moved.index(&FullRange)) && - non_member(mpi, assigned.index(&FullRange)) + Just(mpi) => non_member(mpi, &parents[]) && + non_member(mpi, &moved[]) && + non_member(mpi, &assigned[]) }); - debug!("fragments 5 unmoved: {:?}", frag_lps(unmoved.index(&FullRange))); + debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[])); // Swap contents back in. fragments.unmoved_fragments = unmoved; @@ -433,7 +433,7 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>, let msg = format!("type {} ({:?}) is not fragmentable", parent_ty.repr(tcx), sty_and_variant_info); let opt_span = origin_id.and_then(|id|tcx.map.opt_span(id)); - tcx.sess.opt_span_bug(opt_span, msg.index(&FullRange)) + tcx.sess.opt_span_bug(opt_span, &msg[]) } } } diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs index 2c48e0da01d..889a359b019 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs @@ -306,8 +306,8 @@ fn guarantee_valid(&mut self, ty::ReInfer(..) => { self.tcx().sess.span_bug( cmt.span, - format!("invalid borrow lifetime: {:?}", - loan_region).index(&FullRange)); + &format!("invalid borrow lifetime: {:?}", + loan_region)[]); } }; debug!("loan_scope = {:?}", loan_scope); diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs index 1bb143e1dc8..a7771fefec4 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs @@ -119,8 +119,8 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, mc::cat_static_item => { bccx.span_err( move_from.span, - format!("cannot move out of {}", - bccx.cmt_to_string(&*move_from)).index(&FullRange)); + &format!("cannot move out of {}", + bccx.cmt_to_string(&*move_from))[]); } mc::cat_downcast(ref b, _) | @@ -130,9 +130,9 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, | ty::ty_enum(did, _) if ty::has_dtor(bccx.tcx, did) => { bccx.span_err( move_from.span, - format!("cannot move out of type `{}`, \ + &format!("cannot move out of type `{}`, \ which defines the `Drop` trait", - b.ty.user_string(bccx.tcx)).index(&FullRange)); + b.ty.user_string(bccx.tcx))[]); }, _ => panic!("this path should not cause illegal move") } @@ -152,13 +152,13 @@ fn note_move_destination(bccx: &BorrowckCtxt, "attempting to move value to here"); bccx.span_help( move_to_span, - format!("to prevent the move, \ + &format!("to prevent the move, \ use `ref {0}` or `ref mut {0}` to capture value by \ reference", - pat_name).index(&FullRange)); + pat_name)[]); } else { bccx.span_note(move_to_span, - format!("and here (use `ref {0}` or `ref mut {0}`)", - pat_name).index(&FullRange)); + &format!("and here (use `ref {0}` or `ref mut {0}`)", + pat_name)[]); } } diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 88f56f68622..e734e8fb6ff 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -137,7 +137,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt, check_loans::check_loans(this, &loan_dfcx, flowed_moves, - all_loans.index(&FullRange), + &all_loans[], id, decl, body); @@ -505,7 +505,7 @@ pub fn is_subregion_of(&self, r_sub: ty::Region, r_sup: ty::Region) pub fn report(&self, err: 
BckError<'tcx>) { self.span_err( err.span, - self.bckerr_to_string(&err).index(&FullRange)); + &self.bckerr_to_string(&err)[]); self.note_and_explain_bckerr(err); } @@ -525,9 +525,9 @@ pub fn report_use_of_moved_value<'b>(&self, move_data::Declared => { self.tcx.sess.span_err( use_span, - format!("{} of possibly uninitialized variable: `{}`", + &format!("{} of possibly uninitialized variable: `{}`", verb, - self.loan_path_to_string(lp)).index(&FullRange)); + self.loan_path_to_string(lp))[]); (self.loan_path_to_string(moved_lp), String::new()) } @@ -566,10 +566,10 @@ pub fn report_use_of_moved_value<'b>(&self, else { "" }; self.tcx.sess.span_err( use_span, - format!("{} of {}moved value: `{}`", + &format!("{} of {}moved value: `{}`", verb, msg, - nl).index(&FullRange)); + nl)[]); (ol, moved_lp_msg) } }; @@ -585,32 +585,32 @@ pub fn report_use_of_moved_value<'b>(&self, (ty::expr_ty_adjusted(self.tcx, &*expr), expr.span) } r => { - self.tcx.sess.bug(format!("MoveExpr({}) maps to \ + self.tcx.sess.bug(&format!("MoveExpr({}) maps to \ {:?}, not Expr", the_move.id, - r).index(&FullRange)) + r)[]) } }; let (suggestion, _) = move_suggestion(param_env, expr_span, expr_ty, ("moved by default", "")); self.tcx.sess.span_note( expr_span, - format!("`{}` moved here{} because it has type `{}`, which is {}", + &format!("`{}` moved here{} because it has type `{}`, which is {}", ol, moved_lp_msg, expr_ty.user_string(self.tcx), - suggestion).index(&FullRange)); + suggestion)[]); } move_data::MovePat => { let pat_ty = ty::node_id_to_type(self.tcx, the_move.id); let span = self.tcx.map.span(the_move.id); self.tcx.sess.span_note(span, - format!("`{}` moved here{} because it has type `{}`, \ + &format!("`{}` moved here{} because it has type `{}`, \ which is moved by default", ol, moved_lp_msg, - pat_ty.user_string(self.tcx)).index(&FullRange)); + pat_ty.user_string(self.tcx))[]); self.tcx.sess.span_help(span, "use `ref` to override"); } @@ -623,10 +623,10 @@ pub fn report_use_of_moved_value<'b>(&self, (ty::expr_ty_adjusted(self.tcx, &*expr), expr.span) } r => { - self.tcx.sess.bug(format!("Captured({}) maps to \ + self.tcx.sess.bug(&format!("Captured({}) maps to \ {:?}, not Expr", the_move.id, - r).index(&FullRange)) + r)[]) } }; let (suggestion, help) = @@ -637,12 +637,12 @@ pub fn report_use_of_moved_value<'b>(&self, "make a copy and capture that instead to override")); self.tcx.sess.span_note( expr_span, - format!("`{}` moved into closure environment here{} because it \ + &format!("`{}` moved into closure environment here{} because it \ has type `{}`, which is {}", ol, moved_lp_msg, expr_ty.user_string(self.tcx), - suggestion).index(&FullRange)); + suggestion)[]); self.tcx.sess.span_help(expr_span, help); } } @@ -672,8 +672,8 @@ pub fn report_reassigned_immutable_variable(&self, &move_data::Assignment) { self.tcx.sess.span_err( span, - format!("re-assignment of immutable variable `{}`", - self.loan_path_to_string(lp)).index(&FullRange)); + &format!("re-assignment of immutable variable `{}`", + self.loan_path_to_string(lp))[]); self.tcx.sess.span_note(assign.span, "prior assignment occurs here"); } @@ -798,8 +798,8 @@ pub fn report_aliasability_violation(&self, mc::AliasableOther => { self.tcx.sess.span_err( span, - format!("{} in an aliasable location", - prefix).index(&FullRange)); + &format!("{} in an aliasable location", + prefix)[]); } mc::AliasableClosure(id) => { self.tcx.sess.span_err(span, @@ -812,12 +812,12 @@ pub fn report_aliasability_violation(&self, mc::AliasableStaticMut(..) 
=> { self.tcx.sess.span_err( span, - format!("{} in a static location", prefix).index(&FullRange)); + &format!("{} in a static location", prefix)[]); } mc::AliasableBorrowed => { self.tcx.sess.span_err( span, - format!("{} in a `&` reference", prefix).index(&FullRange)); + &format!("{} in a `&` reference", prefix)[]); } } @@ -884,13 +884,13 @@ pub fn note_and_explain_bckerr(&self, err: BckError<'tcx>) { }; note_and_explain_region( self.tcx, - format!("{} would have to be valid for ", - descr).index(&FullRange), + &format!("{} would have to be valid for ", + descr)[], loan_scope, "..."); note_and_explain_region( self.tcx, - format!("...but {} is only valid for ", descr).index(&FullRange), + &format!("...but {} is only valid for ", descr)[], ptr_scope, ""); } @@ -910,7 +910,7 @@ pub fn append_loan_path_to_string(&self, out.push('('); self.append_loan_path_to_string(&**lp_base, out); out.push_str(DOWNCAST_PRINTED_OPERATOR); - out.push_str(ty::item_path_str(self.tcx, variant_def_id).index(&FullRange)); + out.push_str(&ty::item_path_str(self.tcx, variant_def_id)[]); out.push(')'); } @@ -924,7 +924,7 @@ pub fn append_loan_path_to_string(&self, } mc::PositionalField(idx) => { out.push('.'); - out.push_str(idx.to_string().index(&FullRange)); + out.push_str(&idx.to_string()[]); } } } @@ -956,7 +956,7 @@ pub fn append_autoderefd_loan_path_to_string(&self, out.push('('); self.append_autoderefd_loan_path_to_string(&**lp_base, out); out.push(':'); - out.push_str(ty::item_path_str(self.tcx, variant_def_id).index(&FullRange)); + out.push_str(&ty::item_path_str(self.tcx, variant_def_id)[]); out.push(')'); } diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs index 647a5dd559c..20ad1307da3 100644 --- a/src/librustc_borrowck/graphviz.rs +++ b/src/librustc_borrowck/graphviz.rs @@ -60,7 +60,7 @@ fn dataflow_for(&self, e: EntryOrExit, n: &Node<'a>) -> String { if seen_one { sets.push_str(" "); } else { seen_one = true; } sets.push_str(variant.short_name()); sets.push_str(": "); - sets.push_str(self.dataflow_for_variant(e, n, variant).index(&FullRange)); + sets.push_str(&self.dataflow_for_variant(e, n, variant)[]); } sets } @@ -89,7 +89,7 @@ fn build_set(&self, set.push_str(", "); } let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp); - set.push_str(loan_str.index(&FullRange)); + set.push_str(&loan_str[]); saw_some = true; true }); diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 52d49924d05..019691c1e10 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -58,12 +58,12 @@ pub fn compile_input(sess: Session, let outputs = build_output_filenames(input, outdir, output, - krate.attrs.index(&FullRange), + &krate.attrs[], &sess); - let id = link::find_crate_name(Some(&sess), krate.attrs.index(&FullRange), + let id = link::find_crate_name(Some(&sess), &krate.attrs[], input); let expanded_crate - = match phase_2_configure_and_expand(&sess, krate, id.index(&FullRange), + = match phase_2_configure_and_expand(&sess, krate, &id[], addl_plugins) { None => return, Some(k) => k @@ -75,7 +75,7 @@ pub fn compile_input(sess: Session, let mut forest = ast_map::Forest::new(expanded_crate); let ast_map = assign_node_ids_and_map(&sess, &mut forest); - write_out_deps(&sess, input, &outputs, id.index(&FullRange)); + write_out_deps(&sess, input, &outputs, &id[]); if stop_after_phase_2(&sess) { return; } @@ -171,9 +171,9 @@ pub fn phase_2_configure_and_expand(sess: &Session, let time_passes = sess.time_passes(); 
*sess.crate_types.borrow_mut() = - collect_crate_types(sess, krate.attrs.index(&FullRange)); + collect_crate_types(sess, &krate.attrs[]); *sess.crate_metadata.borrow_mut() = - collect_crate_metadata(sess, krate.attrs.index(&FullRange)); + collect_crate_metadata(sess, &krate.attrs[]); time(time_passes, "recursion limit", (), |_| { middle::recursion_limit::update_recursion_limit(sess, &krate); @@ -268,8 +268,8 @@ pub fn phase_2_configure_and_expand(sess: &Session, if cfg!(windows) { _old_path = os::getenv("PATH").unwrap_or(_old_path); let mut new_path = sess.host_filesearch(PathKind::All).get_dylib_search_paths(); - new_path.extend(os::split_paths(_old_path.index(&FullRange)).into_iter()); - os::setenv("PATH", os::join_paths(new_path.index(&FullRange)).unwrap()); + new_path.extend(os::split_paths(&_old_path[]).into_iter()); + os::setenv("PATH", os::join_paths(&new_path[]).unwrap()); } let cfg = syntax::ext::expand::ExpansionConfig { crate_name: crate_name.to_string(), @@ -533,7 +533,7 @@ pub fn phase_5_run_llvm_passes(sess: &Session, time(sess.time_passes(), "LLVM passes", (), |_| write::run_passes(sess, trans, - sess.opts.output_types.index(&FullRange), + &sess.opts.output_types[], outputs)); } @@ -547,14 +547,14 @@ pub fn phase_6_link_output(sess: &Session, outputs: &OutputFilenames) { let old_path = os::getenv("PATH").unwrap_or_else(||String::new()); let mut new_path = sess.host_filesearch(PathKind::All).get_tools_search_paths(); - new_path.extend(os::split_paths(old_path.index(&FullRange)).into_iter()); - os::setenv("PATH", os::join_paths(new_path.index(&FullRange)).unwrap()); + new_path.extend(os::split_paths(&old_path[]).into_iter()); + os::setenv("PATH", os::join_paths(&new_path[]).unwrap()); time(sess.time_passes(), "linking", (), |_| link::link_binary(sess, trans, outputs, - trans.link.crate_name.index(&FullRange))); + &trans.link.crate_name[])); os::setenv("PATH", old_path); } @@ -643,7 +643,7 @@ fn write_out_deps(sess: &Session, // write Makefile-compatible dependency rules let files: Vec = sess.codemap().files.borrow() .iter().filter(|fmap| fmap.is_real_file()) - .map(|fmap| escape_dep_filename(fmap.name.index(&FullRange))) + .map(|fmap| escape_dep_filename(&fmap.name[])) .collect(); let mut file = try!(io::File::create(&deps_filename)); for path in out_filenames.iter() { @@ -656,8 +656,8 @@ fn write_out_deps(sess: &Session, match result { Ok(()) => {} Err(e) => { - sess.fatal(format!("error writing dependencies to `{}`: {}", - deps_filename.display(), e).index(&FullRange)); + sess.fatal(&format!("error writing dependencies to `{}`: {}", + deps_filename.display(), e)[]); } } } @@ -726,9 +726,9 @@ pub fn collect_crate_types(session: &Session, let res = !link::invalid_output_for_target(session, *crate_type); if !res { - session.warn(format!("dropping unsupported crate type `{:?}` \ + session.warn(&format!("dropping unsupported crate type `{:?}` \ for target `{}`", - *crate_type, session.opts.target_triple).index(&FullRange)); + *crate_type, session.opts.target_triple)[]); } res diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 5af114abeea..d81689132f8 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -89,12 +89,12 @@ fn run_compiler(args: &[String]) { let descriptions = diagnostics::registry::Registry::new(&DIAGNOSTICS); match matches.opt_str("explain") { Some(ref code) => { - match descriptions.find_description(code.index(&FullRange)) { + match descriptions.find_description(&code[]) { Some(ref description) => { println!("{}", 
description); } None => { - early_error(format!("no extended information for {}", code).index(&FullRange)); + early_error(&format!("no extended information for {}", code)[]); } } return; @@ -120,7 +120,7 @@ fn run_compiler(args: &[String]) { early_error("no input filename given"); } 1u => { - let ifile = matches.free[0].index(&FullRange); + let ifile = &matches.free[0][]; if ifile == "-" { let contents = io::stdin().read_to_end().unwrap(); let src = String::from_utf8(contents).unwrap(); @@ -297,7 +297,7 @@ fn sort_lint_groups(lints: Vec<(&'static str, Vec, bool)>) for lint in lints.into_iter() { let name = lint.name_lower().replace("_", "-"); println!(" {} {:7.7} {}", - padded(name.index(&FullRange)), lint.default_level.as_str(), lint.desc); + padded(&name[]), lint.default_level.as_str(), lint.desc); } println!("\n"); }; @@ -327,7 +327,7 @@ fn sort_lint_groups(lints: Vec<(&'static str, Vec, bool)>) let desc = to.into_iter().map(|x| x.as_str().replace("_", "-")) .collect::>().connect(", "); println!(" {} {}", - padded(name.index(&FullRange)), desc); + padded(&name[]), desc); } println!("\n"); }; @@ -393,7 +393,7 @@ pub fn handle_options(mut args: Vec) -> Option { } let matches = - match getopts::getopts(args.index(&FullRange), config::optgroups().index(&FullRange)) { + match getopts::getopts(&args[], &config::optgroups()[]) { Ok(m) => m, Err(f_stable_attempt) => { // redo option parsing, including unstable options this time, @@ -567,15 +567,15 @@ pub fn monitor(f: F) { "run with `RUST_BACKTRACE=1` for a backtrace".to_string(), ]; for note in xs.iter() { - emitter.emit(None, note.index(&FullRange), None, diagnostic::Note) + emitter.emit(None, ¬e[], None, diagnostic::Note) } match r.read_to_string() { Ok(s) => println!("{}", s), Err(e) => { emitter.emit(None, - format!("failed to read internal \ - stderr: {}", e).index(&FullRange), + &format!("failed to read internal \ + stderr: {}", e)[], None, diagnostic::Error) } diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 44a35ef6be7..c090ba033a7 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -294,9 +294,9 @@ fn post(&self, try!(pp::word(&mut s.s, "as")); try!(pp::space(&mut s.s)); try!(pp::word(&mut s.s, - ppaux::ty_to_string( + &ppaux::ty_to_string( tcx, - ty::expr_ty(tcx, expr)).index(&FullRange))); + ty::expr_ty(tcx, expr))[])); s.pclose() } _ => Ok(()) @@ -370,7 +370,7 @@ fn all_matching_node_ids<'a, 'ast>(&'a self, map: &'a ast_map::Map<'ast>) ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()), ItemViaPath(ref parts) => - NodesMatchingSuffix(map.nodes_matching_suffix(parts.index(&FullRange))), + NodesMatchingSuffix(map.nodes_matching_suffix(&parts[])), } } @@ -382,7 +382,7 @@ fn to_one_node_id(self, user_option: &str, sess: &Session, map: &ast_map::Map) - user_option, self.reconstructed_input(), is_wrong_because); - sess.fatal(message.index(&FullRange)) + sess.fatal(&message[]) }; let mut saw_node = ast::DUMMY_NODE_ID; @@ -509,7 +509,7 @@ pub fn pretty_print_input(sess: Session, let is_expanded = needs_expansion(&ppm); let compute_ast_map = needs_ast_map(&ppm, &opt_uii); let krate = if compute_ast_map { - match driver::phase_2_configure_and_expand(&sess, krate, id.index(&FullRange), None) { + match driver::phase_2_configure_and_expand(&sess, krate, &id[], None) { None => return, Some(k) => k } @@ -528,7 +528,7 @@ pub fn pretty_print_input(sess: Session, }; let src_name = driver::source_name(input); - let src = 
sess.codemap().get_filemap(src_name.index(&FullRange)) + let src = sess.codemap().get_filemap(&src_name[]) .src.as_bytes().to_vec(); let mut rdr = MemReader::new(src); @@ -588,16 +588,16 @@ pub fn pretty_print_input(sess: Session, (PpmFlowGraph, opt_uii) => { debug!("pretty printing flow graph for {:?}", opt_uii); let uii = opt_uii.unwrap_or_else(|| { - sess.fatal(format!("`pretty flowgraph=..` needs NodeId (int) or - unique path suffix (b::c::d)").index(&FullRange)) + sess.fatal(&format!("`pretty flowgraph=..` needs NodeId (int) or + unique path suffix (b::c::d)")[]) }); let ast_map = ast_map.expect("--pretty flowgraph missing ast_map"); let nodeid = uii.to_one_node_id("--pretty", &sess, &ast_map); let node = ast_map.find(nodeid).unwrap_or_else(|| { - sess.fatal(format!("--pretty flowgraph couldn't find id: {}", - nodeid).index(&FullRange)) + sess.fatal(&format!("--pretty flowgraph couldn't find id: {}", + nodeid)[]) }); let code = blocks::Code::from_node(node); @@ -615,8 +615,8 @@ pub fn pretty_print_input(sess: Session, // point to what was found, if there's an // accessible span. match ast_map.opt_span(nodeid) { - Some(sp) => sess.span_fatal(sp, message.index(&FullRange)), - None => sess.fatal(message.index(&FullRange)) + Some(sp) => sess.span_fatal(sp, &message[]), + None => sess.fatal(&message[]) } } } diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index d301e9c7b5c..61a29e1e6f2 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -279,7 +279,7 @@ pub fn t_pair(&self, ty1: Ty<'tcx>, ty2: Ty<'tcx>) -> Ty<'tcx> { pub fn t_param(&self, space: subst::ParamSpace, index: u32) -> Ty<'tcx> { let name = format!("T{}", index); - ty::mk_param(self.infcx.tcx, space, index, token::intern(name.index(&FullRange))) + ty::mk_param(self.infcx.tcx, space, index, token::intern(&name[])) } pub fn re_early_bound(&self, diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index ca6b1469f85..466bd608736 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -219,16 +219,16 @@ fn add_child(&self, // had the duplicate. let ns = ns.unwrap(); self.resolve_error(sp, - format!("duplicate definition of {} `{}`", + &format!("duplicate definition of {} `{}`", namespace_error_to_string(duplicate_type), - token::get_name(name)).index(&FullRange)); + token::get_name(name))[]); { let r = child.span_for_namespace(ns); for sp in r.iter() { self.session.span_note(*sp, - format!("first definition of {} `{}` here", + &format!("first definition of {} `{}` here", namespace_error_to_string(duplicate_type), - token::get_name(name)).index(&FullRange)); + token::get_name(name))[]); } } } @@ -1200,8 +1200,8 @@ fn build_import_directive(&mut self, SingleImport(target, _) => { debug!("(building import directive) building import \ directive: {}::{}", - self.names_to_string(module_.imports.borrow().last().unwrap() - .module_path.index(&FullRange)), + self.names_to_string(&module_.imports.borrow().last().unwrap(). 
+ module_path[]), token::get_name(target)); let mut import_resolutions = module_.import_resolutions diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 93ad69e03b1..f119f27fd54 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -1057,11 +1057,10 @@ fn resolve_imports_for_module(&mut self, module: Rc) { }; let msg = format!("unresolved import `{}`{}", self.import_path_to_string( - import_directive.module_path - .index(&FullRange), + &import_directive.module_path[], import_directive.subclass), help); - self.resolve_error(span, msg.index(&FullRange)); + self.resolve_error(span, &msg[]); } Indeterminate => break, // Bail out. We'll come around next time. Success(()) => () // Good. Continue. @@ -1091,7 +1090,7 @@ fn path_names_to_string(&self, path: &Path) -> String { .iter() .map(|seg| seg.identifier.name) .collect(); - self.names_to_string(names.index(&FullRange)) + self.names_to_string(&names[]) } fn import_directive_subclass_to_string(&mut self, @@ -1155,7 +1154,7 @@ fn resolve_import_for_module(&mut self, let module_path = &import_directive.module_path; debug!("(resolving import for module) resolving import `{}::...` in `{}`", - self.names_to_string(module_path.index(&FullRange)), + self.names_to_string(&module_path[]), self.module_to_string(&*module_)); // First, resolve the module path for the directive, if necessary. @@ -1164,7 +1163,7 @@ fn resolve_import_for_module(&mut self, Some((self.graph_root.get_module(), LastMod(AllPublic))) } else { match self.resolve_module_path(module_.clone(), - module_path.index(&FullRange), + &module_path[], DontUseLexicalScope, import_directive.span, ImportSearch) { @@ -1761,7 +1760,7 @@ fn check_for_conflicting_import(&mut self, ValueNS => "value", }, token::get_name(name).get()); - self.session.span_err(import_span, msg.index(&FullRange)); + self.session.span_err(import_span, &msg[]); } Some(_) | None => {} } @@ -1776,7 +1775,7 @@ fn check_that_import_is_importable(&mut self, if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) { let msg = format!("`{}` is not directly importable", token::get_name(name)); - self.session.span_err(import_span, msg.index(&FullRange)); + self.session.span_err(import_span, &msg[]); } } @@ -1801,7 +1800,7 @@ fn check_for_conflicts_between_imports_and_items(&mut self, crate in this module \ (maybe you meant `use {0}::*`?)", token::get_name(name).get()); - self.session.span_err(import_span, msg.index(&FullRange)); + self.session.span_err(import_span, &msg[]); } Some(_) | None => {} } @@ -1823,7 +1822,7 @@ fn check_for_conflicts_between_imports_and_items(&mut self, let msg = format!("import `{}` conflicts with value \ in this module", token::get_name(name).get()); - self.session.span_err(import_span, msg.index(&FullRange)); + self.session.span_err(import_span, &msg[]); if let Some(span) = value.value_span { self.session.span_note(span, "conflicting value here"); @@ -1841,7 +1840,7 @@ fn check_for_conflicts_between_imports_and_items(&mut self, let msg = format!("import `{}` conflicts with type in \ this module", token::get_name(name).get()); - self.session.span_err(import_span, msg.index(&FullRange)); + self.session.span_err(import_span, &msg[]); if let Some(span) = ty.type_span { self.session.span_note(span, "note conflicting type here") @@ -1854,7 +1853,7 @@ fn check_for_conflicts_between_imports_and_items(&mut self, let msg = format!("inherent implementations \ are only allowed on types \ defined in the current module"); - self.session.span_err(span, 
msg.index(&FullRange)); + self.session.span_err(span, &msg[]); self.session.span_note(import_span, "import from other module here") } @@ -1863,7 +1862,7 @@ fn check_for_conflicts_between_imports_and_items(&mut self, let msg = format!("import `{}` conflicts with existing \ submodule", token::get_name(name).get()); - self.session.span_err(import_span, msg.index(&FullRange)); + self.session.span_err(import_span, &msg[]); if let Some(span) = ty.type_span { self.session.span_note(span, "note conflicting module here") @@ -1891,9 +1890,9 @@ fn check_for_conflicts_between_external_crates(&self, if module.external_module_children.borrow().contains_key(&name) { self.session .span_err(span, - format!("an external crate named `{}` has already \ + &format!("an external crate named `{}` has already \ been imported into this module", - token::get_name(name).get()).index(&FullRange)); + token::get_name(name).get())[]); } } @@ -1909,10 +1908,10 @@ fn check_for_conflicts_between_external_crates_and_items(&self, if module.external_module_children.borrow().contains_key(&name) { self.session .span_err(span, - format!("the name `{}` conflicts with an external \ + &format!("the name `{}` conflicts with an external \ crate that has been imported into this \ module", - token::get_name(name).get()).index(&FullRange)); + token::get_name(name).get())[]); } } @@ -1960,7 +1959,7 @@ fn search_parent_externals(needle: Name, module: &Rc) let segment_name = token::get_name(name); let module_name = self.module_to_string(&*search_module); let mut span = span; - let msg = if "???" == module_name.index(&FullRange) { + let msg = if "???" == &module_name[] { span.hi = span.lo + Pos::from_uint(segment_name.get().len()); match search_parent_externals(name, @@ -2073,14 +2072,14 @@ fn resolve_module_path(&mut self, match module_prefix_result { Failed(None) => { let mpath = self.names_to_string(module_path); - let mpath = mpath.index(&FullRange); + let mpath = &mpath[]; match mpath.rfind(':') { Some(idx) => { let msg = format!("Could not find `{}` in `{}`", // idx +- 1 to account for the // colons on either side - mpath.index(&((idx + 1)..)), - mpath.index(&(0..(idx - 1)))); + &mpath[(idx + 1)..], + &mpath[0..(idx - 1)]); return Failed(Some((span, msg))); }, None => { @@ -2254,8 +2253,8 @@ fn resolve_item_in_lexical_scope(&mut self, PathSearch, true) { Failed(Some((span, msg))) => - self.resolve_error(span, format!("failed to resolve. {}", - msg).index(&FullRange)), + self.resolve_error(span, &format!("failed to resolve. {}", + msg)[]), Failed(None) => (), // Continue up the search chain. Indeterminate => { // We couldn't see through the higher scope because of an @@ -2515,7 +2514,7 @@ fn report_unresolved_imports(&mut self, module_: Rc) { } else { let err = format!("unresolved import (maybe you meant `{}::*`?)", sn); - self.resolve_error((*imports)[index].span, err.index(&FullRange)); + self.resolve_error((*imports)[index].span, &err[]); } } @@ -2607,7 +2606,7 @@ fn upvarify(&self, match def_like { DlDef(d @ DefUpvar(..)) => { self.session.span_bug(span, - format!("unexpected {:?} in bindings", d).index(&FullRange)) + &format!("unexpected {:?} in bindings", d)[]) } DlDef(d @ DefLocal(_)) => { let node_id = d.def_id().node; @@ -2753,7 +2752,7 @@ fn search_ribs(&self, for (i, rib) in ribs.iter().enumerate().rev() { match rib.bindings.get(&name).cloned() { Some(def_like) => { - return self.upvarify(ribs.index(&((i + 1)..)), def_like, span); + return self.upvarify(&ribs[(i + 1)..], def_like, span); } None => { // Continue. 
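(A minimal illustration of the rewrite this patch applies throughout, not part of the patch itself. It assumes the 2015-era `slicing_syntax` feature gate; the names `demo` and `takes_str` below are hypothetical and do not appear in the patch. Semantics are unchanged: `&expr[]` and `&expr[a..b]` are sugar for the `index(&FullRange)` and `index(&(a..b))` calls being removed.)

    #![feature(slicing_syntax)]

    // Illustrative sketch only; hypothetical names, not code from the patch.
    fn demo(v: &Vec<String>, s: &String) -> uint {
        let whole: &[String] = &v[];     // was: v.index(&FullRange)
        let tail:  &[String] = &v[1..];  // was: v.index(&(1..))
        let mid:   &[String] = &v[1..3]; // was: v.index(&(1..3))
        let text:  &str      = &s[];     // was: s.index(&FullRange)
        whole.len() + tail.len() + mid.len() + text.len()
    }

    // The pattern used for diagnostics throughout this part of the diff:
    // build a String with format!, then reborrow it as &str.
    fn takes_str(msg: &str) -> uint { msg.len() }

    fn main() {
        let v = vec!["a".to_string(), "b".to_string(), "c".to_string()];
        let s = "hello".to_string();
        let n = demo(&v, &s);
        takes_str(&format!("{} items", n)[]); // was: format!(...).index(&FullRange)
    }
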
@@ -2846,7 +2845,7 @@ fn resolve_item(&mut self, item: &Item) { generics, implemented_traits, &**self_type, - impl_items.index(&FullRange)); + &impl_items[]); } ItemTrait(_, ref generics, ref bounds, ref trait_items) => { @@ -2924,7 +2923,7 @@ fn resolve_item(&mut self, item: &Item) { ItemStruct(ref struct_def, ref generics) => { self.resolve_struct(item.id, generics, - struct_def.fields.index(&FullRange)); + &struct_def.fields[]); } ItemMod(ref module_) => { @@ -2992,12 +2991,12 @@ fn with_type_parameter_rib(&mut self, type_parameters: TypeParameters, f: F) if seen_bindings.contains(&name) { self.resolve_error(type_parameter.span, - format!("the name `{}` is already \ + &format!("the name `{}` is already \ used for a type \ parameter in this type \ parameter list", token::get_name( - name)).index(&FullRange)) + name))[]) } seen_bindings.insert(name); @@ -3169,7 +3168,7 @@ fn resolve_trait_reference(&mut self, }; let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str); - self.resolve_error(trait_reference.path.span, msg.index(&FullRange)); + self.resolve_error(trait_reference.path.span, &msg[]); } Some(def) => { match def { @@ -3179,16 +3178,16 @@ fn resolve_trait_reference(&mut self, } (def, _) => { self.resolve_error(trait_reference.path.span, - format!("`{}` is not a trait", + &format!("`{}` is not a trait", self.path_names_to_string( - &trait_reference.path)).index(&FullRange)); + &trait_reference.path))[]); // If it's a typedef, give a note if let DefTy(..) = def { self.session.span_note( trait_reference.path.span, - format!("`type` aliases cannot be used for traits") - .index(&FullRange)); + &format!("`type` aliases cannot be used for traits") + []); } } } @@ -3383,9 +3382,9 @@ fn check_trait_item(&self, name: Name, span: Span) { if self.trait_item_map.get(&(name, did)).is_none() { let path_str = self.path_names_to_string(&trait_ref.path); self.resolve_error(span, - format!("method `{}` is not a member of trait `{}`", + &format!("method `{}` is not a member of trait `{}`", token::get_name(name), - path_str).index(&FullRange)); + path_str)[]); } } } @@ -3451,19 +3450,19 @@ fn check_consistent_bindings(&mut self, arm: &Arm) { None => { self.resolve_error( p.span, - format!("variable `{}` from pattern #1 is \ + &format!("variable `{}` from pattern #1 is \ not bound in pattern #{}", token::get_name(key), - i + 1).index(&FullRange)); + i + 1)[]); } Some(binding_i) => { if binding_0.binding_mode != binding_i.binding_mode { self.resolve_error( binding_i.span, - format!("variable `{}` is bound with different \ + &format!("variable `{}` is bound with different \ mode in pattern #{} than in pattern #1", token::get_name(key), - i + 1).index(&FullRange)); + i + 1)[]); } } } @@ -3473,10 +3472,10 @@ fn check_consistent_bindings(&mut self, arm: &Arm) { if !map_0.contains_key(&key) { self.resolve_error( binding.span, - format!("variable `{}` from pattern {}{} is \ + &format!("variable `{}` from pattern {}{} is \ not bound in pattern {}1", token::get_name(key), - "#", i + 1, "#").index(&FullRange)); + "#", i + 1, "#")[]); } } } @@ -3591,7 +3590,7 @@ fn resolve_type(&mut self, ty: &Ty) { None => { let msg = format!("use of undeclared type name `{}`", self.path_names_to_string(path)); - self.resolve_error(ty.span, msg.index(&FullRange)); + self.resolve_error(ty.span, &msg[]); } } } @@ -3660,10 +3659,10 @@ struct or enum variant", FoundStructOrEnumVariant(..) 
=> { self.resolve_error( pattern.span, - format!("declaration of `{}` shadows an enum \ + &format!("declaration of `{}` shadows an enum \ variant or unit-like struct in \ scope", - token::get_name(renamed)).index(&FullRange)); + token::get_name(renamed))[]); } FoundConst(ref def, lp) if mode == RefutableMode => { debug!("(resolving pattern) resolving `{}` to \ @@ -3708,23 +3707,23 @@ struct or enum variant", // Forbid duplicate bindings in the same // parameter list. self.resolve_error(pattern.span, - format!("identifier `{}` \ + &format!("identifier `{}` \ is bound more \ than once in \ this parameter \ list", token::get_ident( ident)) - .index(&FullRange)) + []) } else if bindings_list.get(&renamed) == Some(&pat_id) { // Then this is a duplicate variable in the // same disjunction, which is an error. self.resolve_error(pattern.span, - format!("identifier `{}` is bound \ + &format!("identifier `{}` is bound \ more than once in the same \ pattern", - token::get_ident(ident)).index(&FullRange)); + token::get_ident(ident))[]); } // Else, not bound in the same pattern: do // nothing. @@ -3787,7 +3786,7 @@ struct or enum variant", def: {:?}", result); let msg = format!("`{}` does not name a structure", self.path_names_to_string(path)); - self.resolve_error(path.span, msg.index(&FullRange)); + self.resolve_error(path.span, &msg[]); } } } @@ -3848,8 +3847,8 @@ fn resolve_bare_identifier_pattern(&mut self, name: Name, span: Span) Failed(err) => { match err { Some((span, msg)) => { - self.resolve_error(span, format!("failed to resolve: {}", - msg).index(&FullRange)); + self.resolve_error(span, &format!("failed to resolve: {}", + msg)[]); } None => () } @@ -4044,7 +4043,7 @@ fn resolve_module_relative_path(&mut self, let last_private; let module = self.current_module.clone(); match self.resolve_module_path(module, - module_path.index(&FullRange), + &module_path[], UseLexicalScope, path.span, PathSearch) { @@ -4058,8 +4057,8 @@ fn resolve_module_relative_path(&mut self, } }; - self.resolve_error(span, format!("failed to resolve. {}", - msg).index(&FullRange)); + self.resolve_error(span, &format!("failed to resolve. {}", + msg)[]); return None; } Indeterminate => panic!("indeterminate unexpected"), @@ -4102,7 +4101,7 @@ fn resolve_crate_relative_path(&mut self, let containing_module; let last_private; match self.resolve_module_path_from_root(root_module, - module_path.index(&FullRange), + &module_path[], 0, path.span, PathSearch, @@ -4112,13 +4111,13 @@ fn resolve_crate_relative_path(&mut self, Some((span, msg)) => (span, msg), None => { let msg = format!("Use of undeclared module `::{}`", - self.names_to_string(module_path.index(&FullRange))); + self.names_to_string(&module_path[])); (path.span, msg) } }; - self.resolve_error(span, format!("failed to resolve. {}", - msg).index(&FullRange)); + self.resolve_error(span, &format!("failed to resolve. {}", + msg)[]); return None; } @@ -4159,7 +4158,7 @@ fn resolve_identifier_in_local_ribs(&mut self, } TypeNS => { let name = ident.name; - self.search_ribs(self.type_ribs.index(&FullRange), name, span) + self.search_ribs(&self.type_ribs[], name, span) } }; @@ -4213,8 +4212,8 @@ fn resolve_item_by_name_in_lexical_scope(&mut self, Failed(err) => { match err { Some((span, msg)) => - self.resolve_error(span, format!("failed to resolve. {}", - msg).index(&FullRange)), + self.resolve_error(span, &format!("failed to resolve. 
{}", + msg)[]), None => () } @@ -4271,7 +4270,7 @@ fn get_module(this: &mut Resolver, span: Span, name_path: &[ast::Name]) } } else { match this.resolve_module_path(root, - name_path.index(&FullRange), + &name_path[], UseLexicalScope, span, PathSearch) { @@ -4309,7 +4308,7 @@ fn get_module(this: &mut Resolver, span: Span, name_path: &[ast::Name]) let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::>(); // Look for a method in the current self type's impl module. - match get_module(self, path.span, name_path.index(&FullRange)) { + match get_module(self, path.span, &name_path[]) { Some(module) => match module.children.borrow().get(&name) { Some(binding) => { let p_str = self.path_names_to_string(&path); @@ -4520,7 +4519,7 @@ fn resolve_expr(&mut self, expr: &Expr) { def: {:?}", result); let msg = format!("`{}` does not name a structure", self.path_names_to_string(path)); - self.resolve_error(path.span, msg.index(&FullRange)); + self.resolve_error(path.span, &msg[]); } } @@ -4580,8 +4579,8 @@ fn resolve_expr(&mut self, expr: &Expr) { None => { self.resolve_error( expr.span, - format!("use of undeclared label `{}`", - token::get_ident(label)).index(&FullRange)) + &format!("use of undeclared label `{}`", + token::get_ident(label))[]) } Some(DlDef(def @ DefLabel(_))) => { // Since this def is a label, it is never read. @@ -4716,11 +4715,11 @@ fn record_def(&mut self, node_id: NodeId, (def, lp): (Def, LastPrivate)) { // the same conclusion! - nmatsakis Occupied(entry) => if def != *entry.get() { self.session - .bug(format!("node_id {} resolved first to {:?} and \ + .bug(&format!("node_id {} resolved first to {:?} and \ then {:?}", node_id, *entry.get(), - def).index(&FullRange)); + def)[]); }, Vacant(entry) => { entry.insert(def); }, } @@ -4734,9 +4733,9 @@ fn enforce_default_binding_mode(&mut self, BindByValue(_) => {} BindByRef(..) => { self.resolve_error(pat.span, - format!("cannot use `ref` binding mode \ + &format!("cannot use `ref` binding mode \ with {}", - descr).index(&FullRange)); + descr)[]); } } } @@ -4771,8 +4770,8 @@ fn collect_mod(names: &mut Vec, module: &Module) { if names.len() == 0 { return "???".to_string(); } - self.names_to_string(names.into_iter().rev() - .collect::>().index(&FullRange)) + self.names_to_string(&names.into_iter().rev() + .collect::>()[]) } #[allow(dead_code)] // useful for debugging diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 26241ace76f..43f8c677e30 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -128,7 +128,7 @@ pub fn find_crate_name(sess: Option<&Session>, attrs: &[ast::Attribute], input: &Input) -> String { let validate = |&: s: String, span: Option| { - creader::validate_crate_name(sess, s.index(&FullRange), span); + creader::validate_crate_name(sess, &s[], span); s }; @@ -146,7 +146,7 @@ pub fn find_crate_name(sess: Option<&Session>, let msg = format!("--crate-name and #[crate_name] are \ required to match, but `{}` != `{}`", s, name); - sess.span_err(attr.span, msg.index(&FullRange)); + sess.span_err(attr.span, &msg[]); } } return validate(s.clone(), None); @@ -192,17 +192,17 @@ fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>, // to be independent of one another in the crate. 
symbol_hasher.reset(); - symbol_hasher.input_str(link_meta.crate_name.index(&FullRange)); + symbol_hasher.input_str(&link_meta.crate_name[]); symbol_hasher.input_str("-"); symbol_hasher.input_str(link_meta.crate_hash.as_str()); for meta in tcx.sess.crate_metadata.borrow().iter() { - symbol_hasher.input_str(meta.index(&FullRange)); + symbol_hasher.input_str(&meta[]); } symbol_hasher.input_str("-"); - symbol_hasher.input_str(encoder::encoded_ty(tcx, t).index(&FullRange)); + symbol_hasher.input_str(&encoder::encoded_ty(tcx, t)[]); // Prefix with 'h' so that it never blends into adjacent digits let mut hash = String::from_str("h"); - hash.push_str(truncated_hash_result(symbol_hasher).index(&FullRange)); + hash.push_str(&truncated_hash_result(symbol_hasher)[]); hash } @@ -251,7 +251,7 @@ pub fn sanitize(s: &str) -> String { let mut tstr = String::new(); for c in c.escape_unicode() { tstr.push(c) } result.push('$'); - result.push_str(tstr.index(&(1..))); + result.push_str(&tstr[1..]); } } } @@ -260,7 +260,7 @@ pub fn sanitize(s: &str) -> String { if result.len() > 0u && result.as_bytes()[0] != '_' as u8 && ! (result.as_bytes()[0] as char).is_xid_start() { - return format!("_{}", result.index(&FullRange)); + return format!("_{}", &result[]); } return result; @@ -286,12 +286,12 @@ pub fn mangle>(mut path: PI, fn push(n: &mut String, s: &str) { let sani = sanitize(s); - n.push_str(format!("{}{}", sani.len(), sani).index(&FullRange)); + n.push_str(&format!("{}{}", sani.len(), sani)[]); } // First, connect each component with pairs. for e in path { - push(&mut n, token::get_name(e.name()).get().index(&FullRange)) + push(&mut n, &token::get_name(e.name()).get()[]) } match hash { @@ -329,17 +329,17 @@ pub fn mangle_exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, path: PathEl hash.push(EXTRA_CHARS.as_bytes()[extra2] as char); hash.push(EXTRA_CHARS.as_bytes()[extra3] as char); - exported_name(path, hash.index(&FullRange)) + exported_name(path, &hash[]) } pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, name: &str) -> String { let s = ppaux::ty_to_string(ccx.tcx(), t); - let path = [PathName(token::intern(s.index(&FullRange))), + let path = [PathName(token::intern(&s[])), gensym_name(name)]; let hash = get_symbol_hash(ccx, t); - mangle(ast_map::Values(path.iter()), Some(hash.index(&FullRange))) + mangle(ast_map::Values(path.iter()), Some(&hash[])) } pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String { @@ -357,9 +357,9 @@ pub fn remove(sess: &Session, path: &Path) { match fs::unlink(path) { Ok(..) 
=> {} Err(e) => { - sess.err(format!("failed to remove {}: {}", + sess.err(&format!("failed to remove {}: {}", path.display(), - e).index(&FullRange)); + e)[]); } } } @@ -373,8 +373,8 @@ pub fn link_binary(sess: &Session, let mut out_filenames = Vec::new(); for &crate_type in sess.crate_types.borrow().iter() { if invalid_output_for_target(sess, crate_type) { - sess.bug(format!("invalid output type `{:?}` for target os `{}`", - crate_type, sess.opts.target_triple).index(&FullRange)); + sess.bug(&format!("invalid output type `{:?}` for target os `{}`", + crate_type, sess.opts.target_triple)[]); } let out_file = link_binary_output(sess, trans, crate_type, outputs, crate_name); @@ -439,8 +439,8 @@ pub fn filename_for_input(sess: &Session, out_filename.with_filename(format!("lib{}.rlib", libname)) } config::CrateTypeDylib => { - let (prefix, suffix) = (sess.target.target.options.dll_prefix.index(&FullRange), - sess.target.target.options.dll_suffix.index(&FullRange)); + let (prefix, suffix) = (&sess.target.target.options.dll_prefix[], + &sess.target.target.options.dll_suffix[]); out_filename.with_filename(format!("{}{}{}", prefix, libname, @@ -450,7 +450,7 @@ pub fn filename_for_input(sess: &Session, out_filename.with_filename(format!("lib{}.a", libname)) } config::CrateTypeExecutable => { - let suffix = sess.target.target.options.exe_suffix.index(&FullRange); + let suffix = &sess.target.target.options.exe_suffix[]; out_filename.with_filename(format!("{}{}", libname, suffix)) } } @@ -477,14 +477,14 @@ fn link_binary_output(sess: &Session, let obj_is_writeable = is_writeable(&obj_filename); let out_is_writeable = is_writeable(&out_filename); if !out_is_writeable { - sess.fatal(format!("output file {} is not writeable -- check its \ + sess.fatal(&format!("output file {} is not writeable -- check its \ permissions.", - out_filename.display()).index(&FullRange)); + out_filename.display())[]); } else if !obj_is_writeable { - sess.fatal(format!("object file {} is not writeable -- check its \ + sess.fatal(&format!("object file {} is not writeable -- check its \ permissions.", - obj_filename.display()).index(&FullRange)); + obj_filename.display())[]); } match crate_type { @@ -539,7 +539,7 @@ fn link_rlib<'a>(sess: &'a Session, for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() { match kind { cstore::NativeStatic => { - ab.add_native_library(l.index(&FullRange)).unwrap(); + ab.add_native_library(&l[]).unwrap(); } cstore::NativeFramework | cstore::NativeUnknown => {} } @@ -586,13 +586,13 @@ fn link_rlib<'a>(sess: &'a Session, // the same filename for metadata (stomping over one another) let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir"); let metadata = tmpdir.path().join(METADATA_FILENAME); - match fs::File::create(&metadata).write(trans.metadata - .index(&FullRange)) { + match fs::File::create(&metadata).write(&trans.metadata + []) { Ok(..) => {} Err(e) => { - sess.err(format!("failed to write {}: {}", + sess.err(&format!("failed to write {}: {}", metadata.display(), - e).index(&FullRange)); + e)[]); sess.abort_if_errors(); } } @@ -610,25 +610,25 @@ fn link_rlib<'a>(sess: &'a Session, // was exactly 16 bytes. 
let bc_filename = obj_filename.with_extension(format!("{}.bc", i).as_slice()); let bc_deflated_filename = obj_filename.with_extension( - format!("{}.bytecode.deflate", i).index(&FullRange)); + &format!("{}.bytecode.deflate", i)[]); let bc_data = match fs::File::open(&bc_filename).read_to_end() { Ok(buffer) => buffer, - Err(e) => sess.fatal(format!("failed to read bytecode: {}", - e).index(&FullRange)) + Err(e) => sess.fatal(&format!("failed to read bytecode: {}", + e)[]) }; - let bc_data_deflated = match flate::deflate_bytes(bc_data.index(&FullRange)) { + let bc_data_deflated = match flate::deflate_bytes(&bc_data[]) { Some(compressed) => compressed, - None => sess.fatal(format!("failed to compress bytecode from {}", - bc_filename.display()).index(&FullRange)) + None => sess.fatal(&format!("failed to compress bytecode from {}", + bc_filename.display())[]) }; let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) { Ok(file) => file, Err(e) => { - sess.fatal(format!("failed to create compressed bytecode \ - file: {}", e).index(&FullRange)) + sess.fatal(&format!("failed to create compressed bytecode \ + file: {}", e)[]) } }; @@ -636,8 +636,8 @@ fn link_rlib<'a>(sess: &'a Session, bc_data_deflated.as_slice()) { Ok(()) => {} Err(e) => { - sess.err(format!("failed to write compressed bytecode: \ - {}", e).index(&FullRange)); + sess.err(&format!("failed to write compressed bytecode: \ + {}", e)[]); sess.abort_if_errors() } }; @@ -677,7 +677,7 @@ fn write_rlib_bytecode_object_v1(writer: &mut T, try! { writer.write(RLIB_BYTECODE_OBJECT_MAGIC) }; try! { writer.write_le_u32(1) }; try! { writer.write_le_u64(bc_data_deflated_size) }; - try! { writer.write(bc_data_deflated.index(&FullRange)) }; + try! { writer.write(&bc_data_deflated[]) }; let number_of_bytes_written_so_far = RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id @@ -727,12 +727,12 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { let ref name = sess.cstore.get_crate_data(cnum).name; let p = match *path { Some(ref p) => p.clone(), None => { - sess.err(format!("could not find rlib for: `{}`", - name).index(&FullRange)); + sess.err(&format!("could not find rlib for: `{}`", + name)[]); continue } }; - ab.add_rlib(&p, name.index(&FullRange), sess.lto()).unwrap(); + ab.add_rlib(&p, &name[], sess.lto()).unwrap(); let native_libs = csearch::get_native_libraries(&sess.cstore, cnum); all_native_libs.extend(native_libs.into_iter()); @@ -754,7 +754,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { cstore::NativeUnknown => "library", cstore::NativeFramework => "framework", }; - sess.note(format!("{}: {}", name, *lib).index(&FullRange)); + sess.note(&format!("{}: {}", name, *lib)[]); } } @@ -768,12 +768,12 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, // The invocations of cc share some flags across platforms let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname.index(&FullRange)); + let mut cmd = Command::new(&pname[]); - cmd.args(sess.target.target.options.pre_link_args.index(&FullRange)); + cmd.args(&sess.target.target.options.pre_link_args[]); link_args(&mut cmd, sess, dylib, tmpdir.path(), trans, obj_filename, out_filename); - cmd.args(sess.target.target.options.post_link_args.index(&FullRange)); + cmd.args(&sess.target.target.options.post_link_args[]); if !sess.target.target.options.no_compiler_rt { cmd.arg("-lcompiler-rt"); } @@ -791,22 +791,22 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, match prog { 
Ok(prog) => { if !prog.status.success() { - sess.err(format!("linking with `{}` failed: {}", + sess.err(&format!("linking with `{}` failed: {}", pname, - prog.status).index(&FullRange)); - sess.note(format!("{}", &cmd).index(&FullRange)); + prog.status)[]); + sess.note(&format!("{}", &cmd)[]); let mut output = prog.error.clone(); - output.push_all(prog.output.index(&FullRange)); - sess.note(str::from_utf8(output.index(&FullRange)).unwrap()); + output.push_all(&prog.output[]); + sess.note(str::from_utf8(&output[]).unwrap()); sess.abort_if_errors(); } debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap()); debug!("linker stdout:\n{}", String::from_utf8(prog.output).unwrap()); }, Err(e) => { - sess.err(format!("could not exec the linker `{}`: {}", + sess.err(&format!("could not exec the linker `{}`: {}", pname, - e).index(&FullRange)); + e)[]); sess.abort_if_errors(); } } @@ -818,7 +818,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, match Command::new("dsymutil").arg(out_filename).output() { Ok(..) => {} Err(e) => { - sess.err(format!("failed to run dsymutil: {}", e).index(&FullRange)); + sess.err(&format!("failed to run dsymutil: {}", e)[]); sess.abort_if_errors(); } } @@ -867,7 +867,7 @@ fn link_args(cmd: &mut Command, let mut v = b"-Wl,-force_load,".to_vec(); v.push_all(morestack.as_vec()); - cmd.arg(v.index(&FullRange)); + cmd.arg(&v[]); } else { cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]); } @@ -992,7 +992,7 @@ fn link_args(cmd: &mut Command, if sess.opts.cg.rpath { let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec(); v.push_all(out_filename.filename().unwrap()); - cmd.arg(v.index(&FullRange)); + cmd.arg(&v[]); } } else { cmd.arg("-shared"); @@ -1004,7 +1004,7 @@ fn link_args(cmd: &mut Command, // addl_lib_search_paths if sess.opts.cg.rpath { let sysroot = sess.sysroot(); - let target_triple = sess.opts.target_triple.index(&FullRange); + let target_triple = &sess.opts.target_triple[]; let get_install_prefix_lib_path = |:| { let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX"); let tlib = filesearch::relative_target_lib_path(sysroot, target_triple); @@ -1021,14 +1021,14 @@ fn link_args(cmd: &mut Command, get_install_prefix_lib_path: get_install_prefix_lib_path, realpath: ::util::fs::realpath }; - cmd.args(rpath::get_rpath_flags(rpath_config).index(&FullRange)); + cmd.args(&rpath::get_rpath_flags(rpath_config)[]); } // Finally add all the linker arguments provided on the command line along // with any #[link_args] attributes found inside the crate let empty = Vec::new(); - cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty).index(&FullRange)); - cmd.args(used_link_args.index(&FullRange)); + cmd.args(&sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]); + cmd.args(&used_link_args[]); } // # Native library linking @@ -1082,14 +1082,14 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { } else { // -force_load is the OSX equivalent of --whole-archive, but it // involves passing the full path to the library to link. 
- let lib = archive::find_library(l.index(&FullRange), + let lib = archive::find_library(&l[], sess.target.target.options.staticlib_prefix.as_slice(), sess.target.target.options.staticlib_suffix.as_slice(), - search_path.index(&FullRange), + &search_path[], &sess.diagnostic().handler); let mut v = b"-Wl,-force_load,".to_vec(); v.push_all(lib.as_vec()); - cmd.arg(v.index(&FullRange)); + cmd.arg(&v[]); } } if takes_hints { @@ -1102,7 +1102,7 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { cmd.arg(format!("-l{}", l)); } cstore::NativeFramework => { - cmd.arg("-framework").arg(l.index(&FullRange)); + cmd.arg("-framework").arg(&l[]); } cstore::NativeStatic => unreachable!(), } @@ -1158,7 +1158,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, // Converts a library file-stem into a cc -l argument fn unlib<'a>(config: &config::Config, stem: &'a [u8]) -> &'a [u8] { if stem.starts_with("lib".as_bytes()) && !config.target.options.is_like_windows { - stem.index(&(3..)) + &stem[3..] } else { stem } @@ -1183,18 +1183,18 @@ fn add_static_crate(cmd: &mut Command, sess: &Session, tmpdir: &Path, // against the archive. if sess.lto() { let name = cratepath.filename_str().unwrap(); - let name = name.index(&(3..(name.len() - 5))); // chop off lib/.rlib + let name = &name[3..(name.len() - 5)]; // chop off lib/.rlib time(sess.time_passes(), - format!("altering {}.rlib", name).index(&FullRange), + &format!("altering {}.rlib", name)[], (), |()| { let dst = tmpdir.join(cratepath.filename().unwrap()); match fs::copy(&cratepath, &dst) { Ok(..) => {} Err(e) => { - sess.err(format!("failed to copy {} to {}: {}", + sess.err(&format!("failed to copy {} to {}: {}", cratepath.display(), dst.display(), - e).index(&FullRange)); + e)[]); sess.abort_if_errors(); } } @@ -1204,9 +1204,9 @@ fn add_static_crate(cmd: &mut Command, sess: &Session, tmpdir: &Path, match fs::chmod(&dst, io::USER_READ | io::USER_WRITE) { Ok(..) 
=> {} Err(e) => { - sess.err(format!("failed to chmod {} when preparing \ + sess.err(&format!("failed to chmod {} when preparing \ for LTO: {}", dst.display(), - e).index(&FullRange)); + e)[]); sess.abort_if_errors(); } } @@ -1220,9 +1220,9 @@ fn add_static_crate(cmd: &mut Command, sess: &Session, tmpdir: &Path, maybe_ar_prog: sess.opts.cg.ar.clone() }; let mut archive = Archive::open(config); - archive.remove_file(format!("{}.o", name).index(&FullRange)); + archive.remove_file(&format!("{}.o", name)[]); let files = archive.files(); - if files.iter().any(|s| s.index(&FullRange).ends_with(".o")) { + if files.iter().any(|s| s[].ends_with(".o")) { cmd.arg(dst); } }); @@ -1244,7 +1244,7 @@ fn add_dynamic_crate(cmd: &mut Command, sess: &Session, cratepath: Path) { let mut v = "-l".as_bytes().to_vec(); v.push_all(unlib(&sess.target, cratepath.filestem().unwrap())); - cmd.arg(v.index(&FullRange)); + cmd.arg(&v[]); } } @@ -1286,7 +1286,7 @@ fn add_upstream_native_libraries(cmd: &mut Command, sess: &Session) { } cstore::NativeFramework => { cmd.arg("-framework"); - cmd.arg(lib.index(&FullRange)); + cmd.arg(&lib[]); } cstore::NativeStatic => { sess.bug("statics shouldn't be propagated"); diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index ecf2e9ed724..e457de6bc77 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -53,30 +53,30 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, let path = match path { Some(p) => p, None => { - sess.fatal(format!("could not find rlib for: `{}`", - name).index(&FullRange)); + sess.fatal(&format!("could not find rlib for: `{}`", + name)[]); } }; let archive = ArchiveRO::open(&path).expect("wanted an rlib"); let file = path.filename_str().unwrap(); - let file = file.index(&(3..(file.len() - 5))); // chop off lib/.rlib + let file = &file[3..(file.len() - 5)]; // chop off lib/.rlib debug!("reading {}", file); for i in iter::count(0u, 1) { let bc_encoded = time(sess.time_passes(), format!("check for {}.{}.bytecode.deflate", name, i).as_slice(), (), |_| { - archive.read(format!("{}.{}.bytecode.deflate", - file, i).index(&FullRange)) + archive.read(&format!("{}.{}.bytecode.deflate", + file, i)[]) }); let bc_encoded = match bc_encoded { Some(data) => data, None => { if i == 0 { // No bitcode was found at all. - sess.fatal(format!("missing compressed bytecode in {}", - path.display()).index(&FullRange)); + sess.fatal(&format!("missing compressed bytecode in {}", + path.display())[]); } // No more bitcode files to read. break; @@ -91,20 +91,20 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, if version == 1 { // The only version existing so far let data_size = extract_compressed_bytecode_size_v1(bc_encoded); - let compressed_data = bc_encoded.index(&( + let compressed_data = &bc_encoded[ link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET.. 
- (link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET + data_size as uint))); + (link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET + data_size as uint)]; match flate::inflate_bytes(compressed_data) { Some(inflated) => inflated, None => { - sess.fatal(format!("failed to decompress bc of `{}`", - name).index(&FullRange)) + sess.fatal(&format!("failed to decompress bc of `{}`", + name)[]) } } } else { - sess.fatal(format!("Unsupported bytecode format version {}", - version).index(&FullRange)) + sess.fatal(&format!("Unsupported bytecode format version {}", + version)[]) } }) } else { @@ -114,8 +114,8 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, match flate::inflate_bytes(bc_encoded) { Some(bc) => bc, None => { - sess.fatal(format!("failed to decompress bc of `{}`", - name).index(&FullRange)) + sess.fatal(&format!("failed to decompress bc of `{}`", + name)[]) } } }) @@ -124,7 +124,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, let ptr = bc_decoded.as_slice().as_ptr(); debug!("linking {}, part {}", name, i); time(sess.time_passes(), - format!("ll link {}.{}", name, i).index(&FullRange), + &format!("ll link {}.{}", name, i)[], (), |()| unsafe { if !llvm::LLVMRustLinkInExternalBitcode(llmod, @@ -132,7 +132,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, bc_decoded.len() as libc::size_t) { write::llvm_err(sess.diagnostic().handler(), format!("failed to load bc of `{}`", - name.index(&FullRange))); + &name[])); } }); } @@ -186,7 +186,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, fn is_versioned_bytecode_format(bc: &[u8]) -> bool { let magic_id_byte_count = link::RLIB_BYTECODE_OBJECT_MAGIC.len(); return bc.len() > magic_id_byte_count && - bc.index(&(0..magic_id_byte_count)) == link::RLIB_BYTECODE_OBJECT_MAGIC; + &bc[0..magic_id_byte_count] == link::RLIB_BYTECODE_OBJECT_MAGIC; } fn extract_bytecode_format_version(bc: &[u8]) -> u32 { @@ -198,8 +198,7 @@ fn extract_compressed_bytecode_size_v1(bc: &[u8]) -> u64 { } fn read_from_le_bytes(bytes: &[u8], position_in_bytes: uint) -> T { - let byte_data = bytes.index(&(position_in_bytes.. - (position_in_bytes + mem::size_of::()))); + let byte_data = &bytes[position_in_bytes..(position_in_bytes + mem::size_of::())]; let data = unsafe { *(byte_data.as_ptr() as *const T) }; diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 8a80019143e..e0ba6d569cc 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -47,14 +47,14 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! 
{ unsafe { let cstr = llvm::LLVMRustGetLastError(); if cstr == ptr::null() { - handler.fatal(msg.index(&FullRange)); + handler.fatal(&msg[]); } else { let err = ffi::c_str_to_bytes(&cstr); let err = String::from_utf8_lossy(err.as_slice()).to_string(); libc::free(cstr as *mut _); - handler.fatal(format!("{}: {}", - msg.index(&FullRange), - err.index(&FullRange)).index(&FullRange)); + handler.fatal(&format!("{}: {}", + &msg[], + &err[])[]); } } } @@ -104,13 +104,13 @@ fn dump(&mut self, handler: &Handler) { match diag.code { Some(ref code) => { handler.emit_with_code(None, - diag.msg.index(&FullRange), - code.index(&FullRange), + &diag.msg[], + &code[], diag.lvl); }, None => { handler.emit(None, - diag.msg.index(&FullRange), + &diag.msg[], diag.lvl); }, } @@ -165,8 +165,8 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel { fn create_target_machine(sess: &Session) -> TargetMachineRef { let reloc_model_arg = match sess.opts.cg.relocation_model { - Some(ref s) => s.index(&FullRange), - None => sess.target.target.options.relocation_model.index(&FullRange) + Some(ref s) => &s[], + None => &sess.target.target.options.relocation_model[] }; let reloc_model = match reloc_model_arg { "pic" => llvm::RelocPIC, @@ -174,10 +174,10 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { "default" => llvm::RelocDefault, "dynamic-no-pic" => llvm::RelocDynamicNoPic, _ => { - sess.err(format!("{:?} is not a valid relocation mode", + sess.err(&format!("{:?} is not a valid relocation mode", sess.opts .cg - .relocation_model).index(&FullRange)); + .relocation_model)[]); sess.abort_if_errors(); unreachable!(); } @@ -198,8 +198,8 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { let fdata_sections = ffunction_sections; let code_model_arg = match sess.opts.cg.code_model { - Some(ref s) => s.index(&FullRange), - None => sess.target.target.options.code_model.index(&FullRange) + Some(ref s) => &s[], + None => &sess.target.target.options.code_model[] }; let code_model = match code_model_arg { @@ -209,16 +209,16 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { "medium" => llvm::CodeModelMedium, "large" => llvm::CodeModelLarge, _ => { - sess.err(format!("{:?} is not a valid code model", + sess.err(&format!("{:?} is not a valid code model", sess.opts .cg - .code_model).index(&FullRange)); + .code_model)[]); sess.abort_if_errors(); unreachable!(); } }; - let triple = sess.target.target.llvm_target.index(&FullRange); + let triple = &sess.target.target.llvm_target[]; let tm = unsafe { let triple = CString::from_slice(triple.as_bytes()); @@ -350,13 +350,13 @@ struct HandlerFreeVars<'a> { match cgcx.lto_ctxt { Some((sess, _)) => { sess.codemap().with_expn_info(ExpnId::from_llvm_cookie(cookie), |info| match info { - Some(ei) => sess.span_err(ei.call_site, msg.index(&FullRange)), - None => sess.err(msg.index(&FullRange)), + Some(ei) => sess.span_err(ei.call_site, &msg[]), + None => sess.err(&msg[]), }); } None => { - cgcx.handler.err(msg.index(&FullRange)); + cgcx.handler.err(&msg[]); cgcx.handler.note("build without -C codegen-units for more exact errors"); } } @@ -518,14 +518,14 @@ unsafe fn with_codegen(tm: TargetMachineRef, } if config.emit_asm { - let path = output_names.with_extension(format!("{}.s", name_extra).index(&FullRange)); + let path = output_names.with_extension(&format!("{}.s", name_extra)[]); with_codegen(tm, llmod, config.no_builtins, |cpm| { write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::AssemblyFileType); }); } if 
config.emit_obj { - let path = output_names.with_extension(format!("{}.o", name_extra).index(&FullRange)); + let path = output_names.with_extension(&format!("{}.o", name_extra)[]); with_codegen(tm, llmod, config.no_builtins, |cpm| { write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::ObjectFileType); }); @@ -639,7 +639,7 @@ pub fn run_passes(sess: &Session, // Process the work items, optionally using worker threads. if sess.opts.cg.codegen_units == 1 { - run_work_singlethreaded(sess, trans.reachable.index(&FullRange), work_items); + run_work_singlethreaded(sess, &trans.reachable[], work_items); } else { run_work_multithreaded(sess, work_items, sess.opts.cg.codegen_units); } @@ -666,8 +666,8 @@ pub fn run_passes(sess: &Session, if crate_output.single_output_file.is_some() { // 2) Multiple codegen units, with `-o some_name`. We have // no good solution for this case, so warn the user. - sess.warn(format!("ignoring -o because multiple .{} files were produced", - ext).index(&FullRange)); + sess.warn(&format!("ignoring -o because multiple .{} files were produced", + ext)[]); } else { // 3) Multiple codegen units, but no `-o some_name`. We // just leave the `foo.0.x` files in place. @@ -700,20 +700,20 @@ pub fn run_passes(sess: &Session, }; let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname.index(&FullRange)); + let mut cmd = Command::new(&pname[]); - cmd.args(sess.target.target.options.pre_link_args.index(&FullRange)); + cmd.args(&sess.target.target.options.pre_link_args[]); cmd.arg("-nostdlib"); for index in range(0, trans.modules.len()) { - cmd.arg(crate_output.with_extension(format!("{}.o", index).index(&FullRange))); + cmd.arg(crate_output.with_extension(&format!("{}.o", index)[])); } cmd.arg("-r") .arg("-o") .arg(windows_output_path.as_ref().unwrap_or(output_path)); - cmd.args(sess.target.target.options.post_link_args.index(&FullRange)); + cmd.args(&sess.target.target.options.post_link_args[]); if (sess.opts.debugging_opts & config::PRINT_LINK_ARGS) != 0 { println!("{}", &cmd); @@ -725,15 +725,15 @@ pub fn run_passes(sess: &Session, match cmd.status() { Ok(status) => { if !status.success() { - sess.err(format!("linking of {} with `{}` failed", - output_path.display(), cmd).index(&FullRange)); + sess.err(&format!("linking of {} with `{}` failed", + output_path.display(), cmd)[]); sess.abort_if_errors(); } }, Err(e) => { - sess.err(format!("could not exec the linker `{}`: {}", + sess.err(&format!("could not exec the linker `{}`: {}", pname, - e).index(&FullRange)); + e)[]); sess.abort_if_errors(); }, } @@ -818,12 +818,12 @@ pub fn run_passes(sess: &Session, for i in range(0, trans.modules.len()) { if modules_config.emit_obj { let ext = format!("{}.o", i); - remove(sess, &crate_output.with_extension(ext.index(&FullRange))); + remove(sess, &crate_output.with_extension(&ext[])); } if modules_config.emit_bc && !keep_numbered_bitcode { let ext = format!("{}.bc", i); - remove(sess, &crate_output.with_extension(ext.index(&FullRange))); + remove(sess, &crate_output.with_extension(&ext[])); } } @@ -949,7 +949,7 @@ fn run_work_multithreaded(sess: &Session, pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname.index(&FullRange)); + let mut cmd = Command::new(&pname[]); cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject)) .arg(outputs.temp_path(config::OutputTypeAssembly)); @@ -958,20 +958,20 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { match cmd.output() { 
Ok(prog) => { if !prog.status.success() { - sess.err(format!("linking with `{}` failed: {}", + sess.err(&format!("linking with `{}` failed: {}", pname, - prog.status).index(&FullRange)); - sess.note(format!("{}", &cmd).index(&FullRange)); + prog.status)[]); + sess.note(&format!("{}", &cmd)[]); let mut note = prog.error.clone(); - note.push_all(prog.output.index(&FullRange)); - sess.note(str::from_utf8(note.index(&FullRange)).unwrap()); + note.push_all(&prog.output[]); + sess.note(str::from_utf8(¬e[]).unwrap()); sess.abort_if_errors(); } }, Err(e) => { - sess.err(format!("could not exec the linker `{}`: {}", + sess.err(&format!("could not exec the linker `{}`: {}", pname, - e).index(&FullRange)); + e)[]); sess.abort_if_errors(); } } @@ -1004,7 +1004,7 @@ unsafe fn configure_llvm(sess: &Session) { if sess.print_llvm_passes() { add("-debug-pass=Structure"); } for arg in sess.opts.cg.llvm_args.iter() { - add((*arg).index(&FullRange)); + add(&(*arg)[]); } } diff --git a/src/librustc_trans/save/mod.rs b/src/librustc_trans/save/mod.rs index 35f168f092a..eb163ed7406 100644 --- a/src/librustc_trans/save/mod.rs +++ b/src/librustc_trans/save/mod.rs @@ -94,7 +94,7 @@ fn dump_crate_info(&mut self, name: &str, krate: &ast::Crate) { // dump info about all the external crates referenced from this crate self.sess.cstore.iter_crate_data(|n, cmd| { - self.fmt.external_crate_str(krate.span, cmd.name.index(&FullRange), n); + self.fmt.external_crate_str(krate.span, &cmd.name[], n); }); self.fmt.recorder.record("end_external_crates\n"); } @@ -143,7 +143,7 @@ fn write_sub_paths(&mut self, path: &ast::Path) { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname.index(&FullRange), + &qualname[], self.cur_scope); } } @@ -161,7 +161,7 @@ fn write_sub_paths_truncated(&mut self, path: &ast::Path) { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname.index(&FullRange), + &qualname[], self.cur_scope); } } @@ -180,17 +180,17 @@ fn write_sub_path_trait_truncated(&mut self, path: &ast::Path) { let (ref span, ref qualname) = sub_paths[len-2]; self.fmt.sub_type_ref_str(path.span, *span, - qualname.index(&FullRange)); + &qualname[]); // write the other sub-paths if len <= 2 { return; } - let sub_paths = sub_paths.index(&(0..(len-2))); + let sub_paths = &sub_paths[0..(len-2)]; for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname.index(&FullRange), + &qualname[], self.cur_scope); } } @@ -198,8 +198,8 @@ fn write_sub_path_trait_truncated(&mut self, path: &ast::Path) { // looks up anything, not just a type fn lookup_type_ref(&self, ref_id: NodeId) -> Option { if !self.analysis.ty_cx.def_map.borrow().contains_key(&ref_id) { - self.sess.bug(format!("def_map has no key for {} in lookup_type_ref", - ref_id).index(&FullRange)); + self.sess.bug(&format!("def_map has no key for {} in lookup_type_ref", + ref_id)[]); } let def = (*self.analysis.ty_cx.def_map.borrow())[ref_id]; match def { @@ -211,8 +211,8 @@ fn lookup_type_ref(&self, ref_id: NodeId) -> Option { fn lookup_def_kind(&self, ref_id: NodeId, span: Span) -> Option { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&ref_id) { - self.sess.span_bug(span, format!("def_map has no key for {} in lookup_def_kind", - ref_id).index(&FullRange)); + self.sess.span_bug(span, &format!("def_map has no key for {} in lookup_def_kind", + ref_id)[]); } let def = (*def_map)[ref_id]; match def { @@ -240,8 +240,8 @@ 
fn lookup_def_kind(&self, ref_id: NodeId, span: Span) -> Option { def::DefUse(_) | def::DefMethod(..) | def::DefPrimTy(_) => { - self.sess.span_bug(span, format!("lookup_def_kind for unexpected item: {:?}", - def).index(&FullRange)); + self.sess.span_bug(span, &format!("lookup_def_kind for unexpected item: {:?}", + def)[]); }, } } @@ -262,8 +262,8 @@ fn process_formals(&mut self, formals: &Vec, qualname: &str) { span_utils.span_for_last_ident(p.span), id, qualname, - path_to_string(p).index(&FullRange), - typ.index(&FullRange)); + &path_to_string(p)[], + &typ[]); } self.collected_paths.clear(); } @@ -285,7 +285,7 @@ fn process_method(&mut self, method: &ast::Method) { match item.node { ast::ItemImpl(_, _, _, _, ref ty, _) => { let mut result = String::from_str("<"); - result.push_str(ty_to_string(&**ty).index(&FullRange)); + result.push_str(&ty_to_string(&**ty)[]); match ty::trait_of_item(&self.analysis.ty_cx, ast_util::local_def(method.id)) { @@ -301,18 +301,18 @@ fn process_method(&mut self, method: &ast::Method) { } _ => { self.sess.span_bug(method.span, - format!("Container {} for method {} not an impl?", - impl_id.node, method.id).index(&FullRange)); + &format!("Container {} for method {} not an impl?", + impl_id.node, method.id)[]); }, } }, _ => { self.sess.span_bug(method.span, - format!("Container {} for method {} is not a node item {:?}", - impl_id.node, - method.id, - self.analysis.ty_cx.map.get(impl_id.node) - ).index(&FullRange)); + &format!( + "Container {} for method {} is not a node item {:?}", + impl_id.node, + method.id, + self.analysis.ty_cx.map.get(impl_id.node))[]); }, }, None => match ty::trait_of_item(&self.analysis.ty_cx, @@ -327,21 +327,21 @@ fn process_method(&mut self, method: &ast::Method) { } _ => { self.sess.span_bug(method.span, - format!("Could not find container {} for method {}", - def_id.node, method.id).index(&FullRange)); + &format!("Could not find container {} for method {}", + def_id.node, method.id)[]); } } }, None => { self.sess.span_bug(method.span, - format!("Could not find container for method {}", - method.id).index(&FullRange)); + &format!("Could not find container for method {}", + method.id)[]); }, }, }; qualname.push_str(get_ident(method.pe_ident()).get()); - let qualname = qualname.index(&FullRange); + let qualname = &qualname[]; // record the decl for this def (if it has one) let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx, @@ -430,13 +430,13 @@ fn process_struct_field_def(&mut self, Some(sub_span) => self.fmt.field_str(field.span, Some(sub_span), field.node.id, - name.get().index(&FullRange), - qualname.index(&FullRange), - typ.index(&FullRange), + &name.get()[], + &qualname[], + &typ[], scope_id), None => self.sess.span_bug(field.span, - format!("Could not find sub-span for field {}", - qualname).index(&FullRange)), + &format!("Could not find sub-span for field {}", + qualname)[]), } }, _ => (), @@ -463,7 +463,7 @@ fn process_generic_params(&mut self, generics:&ast::Generics, self.fmt.typedef_str(full_span, Some(*param_ss), param.id, - name.index(&FullRange), + &name[], ""); } self.visit_generics(generics); @@ -480,10 +480,10 @@ fn process_fn(&mut self, self.fmt.fn_str(item.span, sub_span, item.id, - qualname.index(&FullRange), + &qualname[], self.cur_scope); - self.process_formals(&decl.inputs, qualname.index(&FullRange)); + self.process_formals(&decl.inputs, &qualname[]); // walk arg and return types for arg in decl.inputs.iter() { @@ -497,7 +497,7 @@ fn process_fn(&mut self, // walk the body self.nest(item.id, |v| 
v.visit_block(&*body)); - self.process_generic_params(ty_params, item.span, qualname.index(&FullRange), item.id); + self.process_generic_params(ty_params, item.span, &qualname[], item.id); } fn process_static(&mut self, @@ -519,9 +519,9 @@ fn process_static(&mut self, sub_span, item.id, get_ident(item.ident).get(), - qualname.index(&FullRange), - value.index(&FullRange), - ty_to_string(&*typ).index(&FullRange), + &qualname[], + &value[], + &ty_to_string(&*typ)[], self.cur_scope); // walk type and init value @@ -542,9 +542,9 @@ fn process_const(&mut self, sub_span, item.id, get_ident(item.ident).get(), - qualname.index(&FullRange), + &qualname[], "", - ty_to_string(&*typ).index(&FullRange), + &ty_to_string(&*typ)[], self.cur_scope); // walk type and init value @@ -568,17 +568,17 @@ fn process_struct(&mut self, sub_span, item.id, ctor_id, - qualname.index(&FullRange), + &qualname[], self.cur_scope, - val.index(&FullRange)); + &val[]); // fields for field in def.fields.iter() { - self.process_struct_field_def(field, qualname.index(&FullRange), item.id); + self.process_struct_field_def(field, &qualname[], item.id); self.visit_ty(&*field.node.ty); } - self.process_generic_params(ty_params, item.span, qualname.index(&FullRange), item.id); + self.process_generic_params(ty_params, item.span, &qualname[], item.id); } fn process_enum(&mut self, @@ -591,12 +591,12 @@ fn process_enum(&mut self, Some(sub_span) => self.fmt.enum_str(item.span, Some(sub_span), item.id, - enum_name.index(&FullRange), + &enum_name[], self.cur_scope, - val.index(&FullRange)), + &val[]), None => self.sess.span_bug(item.span, - format!("Could not find subspan for enum {}", - enum_name).index(&FullRange)), + &format!("Could not find subspan for enum {}", + enum_name)[]), } for variant in enum_definition.variants.iter() { let name = get_ident(variant.node.name); @@ -612,9 +612,9 @@ fn process_enum(&mut self, self.span.span_for_first_ident(variant.span), variant.node.id, name, - qualname.index(&FullRange), - enum_name.index(&FullRange), - val.index(&FullRange), + &qualname[], + &enum_name[], + &val[], item.id); for arg in args.iter() { self.visit_ty(&*arg.ty); @@ -630,9 +630,9 @@ fn process_enum(&mut self, self.span.span_for_first_ident(variant.span), variant.node.id, ctor_id, - qualname.index(&FullRange), - enum_name.index(&FullRange), - val.index(&FullRange), + &qualname[], + &enum_name[], + &val[], item.id); for field in struct_def.fields.iter() { @@ -643,7 +643,7 @@ fn process_enum(&mut self, } } - self.process_generic_params(ty_params, item.span, enum_name.index(&FullRange), item.id); + self.process_generic_params(ty_params, item.span, &enum_name[], item.id); } fn process_impl(&mut self, @@ -703,9 +703,9 @@ fn process_trait(&mut self, self.fmt.trait_str(item.span, sub_span, item.id, - qualname.index(&FullRange), + &qualname[], self.cur_scope, - val.index(&FullRange)); + &val[]); // super-traits for super_bound in trait_refs.iter() { @@ -737,7 +737,7 @@ fn process_trait(&mut self, } // walk generics and methods - self.process_generic_params(generics, item.span, qualname.index(&FullRange), item.id); + self.process_generic_params(generics, item.span, &qualname[], item.id); for method in methods.iter() { self.visit_trait_item(method) } @@ -755,9 +755,9 @@ fn process_mod(&mut self, self.fmt.mod_str(item.span, sub_span, item.id, - qualname.index(&FullRange), + &qualname[], self.cur_scope, - filename.index(&FullRange)); + &filename[]); self.nest(item.id, |v| visit::walk_mod(v, m)); } @@ -840,8 +840,8 @@ fn process_path(&mut self, 
def_id, self.cur_scope), _ => self.sess.span_bug(span, - format!("Unexpected def kind while looking up path in '{}'", - self.span.snippet(span)).index(&FullRange)), + &format!("Unexpected def kind while looking up path in '{}'", + self.span.snippet(span))[]), } // modules or types in the path prefix match *def { @@ -959,7 +959,7 @@ fn process_method_call(&mut self, self.cur_scope); // walk receiver and args - visit::walk_exprs(self, args.index(&FullRange)); + visit::walk_exprs(self, &args[]); } fn process_pat(&mut self, p:&ast::Pat) { @@ -975,8 +975,8 @@ fn process_pat(&mut self, p:&ast::Pat) { Some(sd) => sd, None => { self.sess.span_bug(p.span, - format!("Could not find struct_def for `{}`", - self.span.snippet(p.span)).index(&FullRange)); + &format!("Could not find struct_def for `{}`", + self.span.snippet(p.span))[]); } }; for &Spanned { node: ref field, span } in fields.iter() { @@ -1061,8 +1061,8 @@ fn visit_item(&mut self, item: &ast::Item) { self.fmt.typedef_str(item.span, sub_span, item.id, - qualname.index(&FullRange), - value.index(&FullRange)); + &qualname[], + &value[]); self.visit_ty(&**ty); self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id); @@ -1121,13 +1121,13 @@ fn visit_trait_item(&mut self, tm: &ast::TraitItem) { }, None => { self.sess.span_bug(method_type.span, - format!("Could not find trait for method {}", - method_type.id).index(&FullRange)); + &format!("Could not find trait for method {}", + method_type.id)[]); }, }; qualname.push_str(get_ident(method_type.ident).get()); - let qualname = qualname.index(&FullRange); + let qualname = &qualname[]; let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn); self.fmt.method_decl_str(method_type.span, @@ -1262,7 +1262,7 @@ fn visit_view_item(&mut self, i: &ast::ViewItem) { id, cnum, name, - s.index(&FullRange), + &s[], self.cur_scope); }, } @@ -1371,8 +1371,8 @@ fn visit_expr(&mut self, ex: &ast::Expr) { } let mut id = String::from_str("$"); - id.push_str(ex.id.to_string().index(&FullRange)); - self.process_formals(&decl.inputs, id.index(&FullRange)); + id.push_str(&ex.id.to_string()[]); + self.process_formals(&decl.inputs, &id[]); // walk arg and return types for arg in decl.inputs.iter() { @@ -1418,8 +1418,8 @@ fn visit_arm(&mut self, arm: &ast::Arm) { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&id) { self.sess.span_bug(p.span, - format!("def_map has no key for {} in visit_arm", - id).index(&FullRange)); + &format!("def_map has no key for {} in visit_arm", + id)[]); } let def = &(*def_map)[id]; match *def { @@ -1434,8 +1434,8 @@ fn visit_arm(&mut self, arm: &ast::Arm) { self.fmt.variable_str(p.span, Some(p.span), id, - path_to_string(p).index(&FullRange), - value.index(&FullRange), + &path_to_string(p)[], + &value[], "") } def::DefVariant(..) 
=> { @@ -1490,9 +1490,9 @@ fn visit_local(&mut self, l: &ast::Local) { self.fmt.variable_str(p.span, sub_span, id, - path_to_string(p).index(&FullRange), - value.index(&FullRange), - typ.index(&FullRange)); + &path_to_string(p)[], + &value[], + &typ[]); } self.collected_paths.clear(); @@ -1511,7 +1511,7 @@ pub fn process_crate(sess: &Session, } assert!(analysis.glob_map.is_some()); - let cratename = match attr::find_crate_name(krate.attrs.index(&FullRange)) { + let cratename = match attr::find_crate_name(&krate.attrs[]) { Some(name) => name.get().to_string(), None => { info!("Could not find crate name, using 'unknown_crate'"); @@ -1531,8 +1531,8 @@ pub fn process_crate(sess: &Session, }; match fs::mkdir_recursive(&root_path, io::USER_RWX) { - Err(e) => sess.err(format!("Could not create directory {}: {}", - root_path.display(), e).index(&FullRange)), + Err(e) => sess.err(&format!("Could not create directory {}: {}", + root_path.display(), e)[]), _ => (), } @@ -1549,7 +1549,7 @@ pub fn process_crate(sess: &Session, Ok(f) => box f, Err(e) => { let disp = root_path.display(); - sess.fatal(format!("Could not open {}: {}", disp, e).index(&FullRange)); + sess.fatal(&format!("Could not open {}: {}", disp, e)[]); } }; root_path.pop(); @@ -1575,7 +1575,7 @@ pub fn process_crate(sess: &Session, cur_scope: 0 }; - visitor.dump_crate_info(cratename.index(&FullRange), krate); + visitor.dump_crate_info(&cratename[], krate); visit::walk_crate(&mut visitor, krate); } diff --git a/src/librustc_trans/save/recorder.rs b/src/librustc_trans/save/recorder.rs index bb0fb387002..23598751c08 100644 --- a/src/librustc_trans/save/recorder.rs +++ b/src/librustc_trans/save/recorder.rs @@ -41,7 +41,7 @@ pub fn dump_span(&mut self, assert!(self.dump_spans); let result = format!("span,kind,{},{},text,\"{}\"\n", kind, su.extent_str(span), escape(su.snippet(span))); - self.record(result.index(&FullRange)); + self.record(&result[]); } } @@ -158,17 +158,17 @@ pub fn make_values_str(&self, values: Vec, span: Span) -> Option { if values.len() != fields.len() { - self.span.sess.span_bug(span, format!( + self.span.sess.span_bug(span, &format!( "Mismatch between length of fields for '{}', expected '{}', found '{}'", - kind, fields.len(), values.len()).index(&FullRange)); + kind, fields.len(), values.len())[]); } let values = values.iter().map(|s| { // Never take more than 1020 chars if s.len() > 1020 { - s.index(&(0..1020)) + &s[0..1020] } else { - s.index(&FullRange) + &s[] } }); @@ -184,7 +184,7 @@ pub fn make_values_str(&self, } ))); Some(strs.fold(String::new(), |mut s, ss| { - s.push_str(ss.index(&FullRange)); + s.push_str(&ss[]); s })) } @@ -196,9 +196,9 @@ pub fn record_without_span(&mut self, let (label, ref fields, needs_span, dump_spans) = FmtStrs::lookup_row(kind); if needs_span { - self.span.sess.span_bug(span, format!( + self.span.sess.span_bug(span, &format!( "Called record_without_span for '{}' which does requires a span", - label).index(&FullRange)); + label)[]); } assert!(!dump_spans); @@ -212,9 +212,9 @@ pub fn record_without_span(&mut self, }; let mut result = String::from_str(label); - result.push_str(values_str.index(&FullRange)); + result.push_str(&values_str[]); result.push_str("\n"); - self.recorder.record(result.index(&FullRange)); + self.recorder.record(&result[]); } pub fn record_with_span(&mut self, @@ -245,7 +245,7 @@ pub fn record_with_span(&mut self, None => return, }; let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str); - 
self.recorder.record(result.index(&FullRange)); + self.recorder.record(&result[]); } pub fn check_and_record(&mut self, @@ -275,7 +275,7 @@ pub fn variable_str(&mut self, // variable def's node id let mut qualname = String::from_str(name); qualname.push_str("$"); - qualname.push_str(id.to_string().index(&FullRange)); + qualname.push_str(&id.to_string()[]); self.check_and_record(Variable, span, sub_span, diff --git a/src/librustc_trans/save/span_utils.rs b/src/librustc_trans/save/span_utils.rs index 8d249b8bfe9..77343612ac8 100644 --- a/src/librustc_trans/save/span_utils.rs +++ b/src/librustc_trans/save/span_utils.rs @@ -217,8 +217,8 @@ pub fn sub_span_for_type_name(&self, span: Span) -> Option { if bracket_count != 0 { let loc = self.sess.codemap().lookup_char_pos(span.lo); self.sess.span_bug(span, - format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}", - self.snippet(span), loc.file.name, loc.line).index(&FullRange)); + &format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}", + self.snippet(span), loc.file.name, loc.line)[]); } if result.is_none() && prev.tok.is_ident() && bracket_count == 0 { return self.make_sub_span(span, Some(prev.sp)); @@ -242,9 +242,9 @@ pub fn spans_with_brackets(&self, span: Span, nesting: int, limit: int) -> Vec(bcx: Block<'blk, 'tcx>, let _indenter = indenter(); m.iter().filter_map(|br| { - e(br.pats.index(&FullRange)).map(|pats| { + e(&br.pats[]).map(|pats| { let this = br.pats[col]; let mut bound_ptrs = br.bound_ptrs.clone(); match this.node { @@ -471,8 +471,8 @@ fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Collect all of the matches that can match against anything. enter_match(bcx, dm, m, col, val, |pats| { if pat_is_binding_or_wild(dm, &*pats[col]) { - let mut r = pats.index(&(0..col)).to_vec(); - r.push_all(pats.index(&((col + 1)..))); + let mut r = pats[0..col].to_vec(); + r.push_all(&pats[(col + 1)..]); Some(r) } else { None @@ -548,7 +548,7 @@ fn enter_opt<'a, 'p, 'blk, 'tcx>( param_env: param_env, }; enter_match(bcx, dm, m, col, val, |pats| - check_match::specialize(&mcx, pats.index(&FullRange), &ctor, col, variant_size) + check_match::specialize(&mcx, &pats[], &ctor, col, variant_size) ) } @@ -789,8 +789,8 @@ fn compare_str<'blk, 'tcx>(cx: Block<'blk, 'tcx>, -> Result<'blk, 'tcx> { let did = langcall(cx, None, - format!("comparison of `{}`", - cx.ty_to_string(rhs_t)).index(&FullRange), + &format!("comparison of `{}`", + cx.ty_to_string(rhs_t))[], StrEqFnLangItem); callee::trans_lang_call(cx, did, &[lhs, rhs], None) } @@ -945,7 +945,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if has_nested_bindings(m, col) { let expanded = expand_nested_bindings(bcx, m, col, val); compile_submatch_continue(bcx, - expanded.index(&FullRange), + &expanded[], vals, chk, col, @@ -967,7 +967,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx = compile_guard(bcx, &**guard_expr, m[0].data, - m.index(&(1..m.len())), + &m[1..m.len()], vals, chk, has_genuine_default); @@ -990,8 +990,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let tcx = bcx.tcx(); let dm = &tcx.def_map; - let mut vals_left = vals.index(&(0u..col)).to_vec(); - vals_left.push_all(vals.index(&((col + 1u)..))); + let mut vals_left = vals[0u..col].to_vec(); + vals_left.push_all(&vals[(col + 1u)..]); let ccx = bcx.fcx.ccx; // Find a real id (we're adding placeholder wildcard patterns, but @@ -1191,10 +1191,10 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: 
Block<'blk, 'tcx>, } let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val); let mut opt_vals = unpacked; - opt_vals.push_all(vals_left.index(&FullRange)); + opt_vals.push_all(&vals_left[]); compile_submatch(opt_cx, - opt_ms.index(&FullRange), - opt_vals.index(&FullRange), + &opt_ms[], + &opt_vals[], branch_chk.as_ref().unwrap_or(chk), has_genuine_default); } @@ -1213,8 +1213,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } _ => { compile_submatch(else_cx, - defaults.index(&FullRange), - vals_left.index(&FullRange), + &defaults[], + &vals_left[], chk, has_genuine_default); } @@ -1333,7 +1333,7 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat, "__llmatch"); trmode = TrByCopy(alloca_no_lifetime(bcx, llvariable_ty, - bcx.ident(ident).index(&FullRange))); + &bcx.ident(ident)[])); } ast::BindByValue(_) => { // in this case, the final type of the variable will be T, @@ -1341,13 +1341,13 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat, // above llmatch = alloca_no_lifetime(bcx, llvariable_ty.ptr_to(), - bcx.ident(ident).index(&FullRange)); + &bcx.ident(ident)[]); trmode = TrByMove; } ast::BindByRef(_) => { llmatch = alloca_no_lifetime(bcx, llvariable_ty, - bcx.ident(ident).index(&FullRange)); + &bcx.ident(ident)[]); trmode = TrByRef; } }; @@ -1415,7 +1415,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, && arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle) }); - compile_submatch(bcx, matches.index(&FullRange), &[discr_datum.val], &chk, has_default); + compile_submatch(bcx, &matches[], &[discr_datum.val], &chk, has_default); let mut arm_cxs = Vec::new(); for arm_data in arm_datas.iter() { @@ -1429,7 +1429,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, arm_cxs.push(bcx); } - bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs.index(&FullRange)); + bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[]); return bcx; } @@ -1582,7 +1582,7 @@ fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>, let var_ty = node_id_type(bcx, p_id); // Allocate memory on stack for the binding. - let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident).index(&FullRange)); + let llval = alloc_ty(bcx, var_ty, &bcx.ident(*ident)[]); // Subtle: be sure that we *populate* the memory *before* // we schedule the cleanup. 
@@ -1619,8 +1619,8 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat.repr(bcx.tcx())); if bcx.sess().asm_comments() { - add_comment(bcx, format!("bind_irrefutable_pat(pat={})", - pat.repr(bcx.tcx())).index(&FullRange)); + add_comment(bcx, &format!("bind_irrefutable_pat(pat={})", + pat.repr(bcx.tcx()))[]); } let _indenter = indenter(); diff --git a/src/librustc_trans/trans/adt.rs b/src/librustc_trans/trans/adt.rs index 231de71848a..59b4643fdc5 100644 --- a/src/librustc_trans/trans/adt.rs +++ b/src/librustc_trans/trans/adt.rs @@ -154,7 +154,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Repr<'tcx> { match t.sty { ty::ty_tup(ref elems) => { - Univariant(mk_struct(cx, elems.index(&FullRange), false, t), false) + Univariant(mk_struct(cx, &elems[], false, t), false) } ty::ty_struct(def_id, substs) => { let fields = ty::lookup_struct_fields(cx.tcx(), def_id); @@ -165,17 +165,17 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag(); if dtor { ftys.push(cx.tcx().types.bool); } - Univariant(mk_struct(cx, ftys.index(&FullRange), packed, t), dtor) + Univariant(mk_struct(cx, &ftys[], packed, t), dtor) } ty::ty_unboxed_closure(def_id, _, substs) => { let typer = NormalizingUnboxedClosureTyper::new(cx.tcx()); let upvars = typer.unboxed_closure_upvars(def_id, substs).unwrap(); let upvar_types = upvars.iter().map(|u| u.ty).collect::>(); - Univariant(mk_struct(cx, upvar_types.index(&FullRange), false, t), false) + Univariant(mk_struct(cx, &upvar_types[], false, t), false) } ty::ty_enum(def_id, substs) => { let cases = get_cases(cx.tcx(), def_id, substs); - let hint = *ty::lookup_repr_hints(cx.tcx(), def_id).index(&FullRange).get(0) + let hint = *ty::lookup_repr_hints(cx.tcx(), def_id)[].get(0) .unwrap_or(&attr::ReprAny); let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag(); @@ -185,7 +185,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // (Typechecking will reject discriminant-sizing attrs.) assert_eq!(hint, attr::ReprAny); let ftys = if dtor { vec!(cx.tcx().types.bool) } else { vec!() }; - return Univariant(mk_struct(cx, ftys.index(&FullRange), false, t), + return Univariant(mk_struct(cx, &ftys[], false, t), dtor); } @@ -205,10 +205,10 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // non-empty body, explicit discriminants should have // been rejected by a checker before this point. 
if !cases.iter().enumerate().all(|(i,c)| c.discr == (i as Disr)) { - cx.sess().bug(format!("non-C-like enum {} with specified \ + cx.sess().bug(&format!("non-C-like enum {} with specified \ discriminants", ty::item_path_str(cx.tcx(), - def_id)).index(&FullRange)); + def_id))[]); } if cases.len() == 1 { @@ -217,7 +217,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, assert_eq!(hint, attr::ReprAny); let mut ftys = cases[0].tys.clone(); if dtor { ftys.push(cx.tcx().types.bool); } - return Univariant(mk_struct(cx, ftys.index(&FullRange), false, t), + return Univariant(mk_struct(cx, &ftys[], false, t), dtor); } @@ -226,7 +226,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let mut discr = 0; while discr < 2 { if cases[1 - discr].is_zerolen(cx, t) { - let st = mk_struct(cx, cases[discr].tys.index(&FullRange), + let st = mk_struct(cx, &cases[discr].tys[], false, t); match cases[discr].find_ptr(cx) { Some(ref df) if df.len() == 1 && st.fields.len() == 1 => { @@ -316,17 +316,17 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let fields : Vec<_> = cases.iter().map(|c| { let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity)); - ftys.push_all(c.tys.index(&FullRange)); + ftys.push_all(&c.tys[]); if dtor { ftys.push(cx.tcx().types.bool); } - mk_struct(cx, ftys.index(&FullRange), false, t) + mk_struct(cx, &ftys[], false, t) }).collect(); - ensure_enum_fits_in_address_space(cx, ity, fields.index(&FullRange), t); + ensure_enum_fits_in_address_space(cx, ity, &fields[], t); General(ity, fields, dtor) } - _ => cx.sess().bug(format!("adt::represent_type called on non-ADT type: {}", - ty_to_string(cx.tcx(), t)).index(&FullRange)) + _ => cx.sess().bug(&format!("adt::represent_type called on non-ADT type: {}", + ty_to_string(cx.tcx(), t))[]) } } @@ -412,7 +412,7 @@ fn find_discr_field_candidate<'tcx>(tcx: &ty::ctxt<'tcx>, impl<'tcx> Case<'tcx> { fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>) -> bool { - mk_struct(cx, self.tys.index(&FullRange), false, scapegoat).size == 0 + mk_struct(cx, &self.tys[], false, scapegoat).size == 0 } fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option { @@ -451,9 +451,9 @@ fn mk_struct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, .map(|&ty| type_of::sizing_type_of(cx, ty)).collect() }; - ensure_struct_fits_in_address_space(cx, lltys.index(&FullRange), packed, scapegoat); + ensure_struct_fits_in_address_space(cx, &lltys[], packed, scapegoat); - let llty_rec = Type::struct_(cx, lltys.index(&FullRange), packed); + let llty_rec = Type::struct_(cx, &lltys[], packed); Struct { size: machine::llsize_of_alloc(cx, llty_rec), align: machine::llalign_of_min(cx, llty_rec), @@ -502,7 +502,7 @@ fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntTyp return ity; } attr::ReprExtern => { - attempts = match cx.sess().target.target.arch.index(&FullRange) { + attempts = match &cx.sess().target.target.arch[] { // WARNING: the ARM EABI has two variants; the one corresponding to `at_least_32` // appears to be used on Linux and NetBSD, but some systems may use the variant // corresponding to `choose_shortest`. However, we don't run on those yet...? @@ -628,7 +628,7 @@ pub fn finish_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match *r { CEnum(..) | General(..) | RawNullablePointer { .. } => { } Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. 
} => - llty.set_struct_body(struct_llfields(cx, st, false, false).index(&FullRange), + llty.set_struct_body(&struct_llfields(cx, st, false, false)[], st.packed) } } @@ -644,7 +644,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } => { match name { None => { - Type::struct_(cx, struct_llfields(cx, st, sizing, dst).index(&FullRange), + Type::struct_(cx, &struct_llfields(cx, st, sizing, dst)[], st.packed) } Some(name) => { assert_eq!(sizing, false); Type::named_struct(cx, name) } @@ -663,7 +663,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // of the size. // // FIXME #10604: this breaks when vector types are present. - let (size, align) = union_size_and_align(sts.index(&FullRange)); + let (size, align) = union_size_and_align(&sts[]); let align_s = align as u64; let discr_ty = ll_inttype(cx, ity); let discr_size = machine::llsize_of_alloc(cx, discr_ty); @@ -684,10 +684,10 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, Type::array(&discr_ty, align_s / discr_size - 1), fill_ty]; match name { - None => Type::struct_(cx, fields.index(&FullRange), false), + None => Type::struct_(cx, &fields[], false), Some(name) => { let mut llty = Type::named_struct(cx, name); - llty.set_struct_body(fields.index(&FullRange), false); + llty.set_struct_body(&fields[], false); llty } } @@ -765,7 +765,7 @@ pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField, scrutinee: ValueRef) -> ValueRef { - let llptrptr = GEPi(bcx, scrutinee, discrfield.index(&FullRange)); + let llptrptr = GEPi(bcx, scrutinee, &discrfield[]); let llptr = Load(bcx, llptrptr); let cmp = if nndiscr == 0 { IntEQ } else { IntNE }; ICmp(bcx, cmp, llptr, C_null(val_ty(llptr))) @@ -853,7 +853,7 @@ pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, } StructWrappedNullablePointer { nndiscr, ref discrfield, .. 
} => { if discr != nndiscr { - let llptrptr = GEPi(bcx, val, discrfield.index(&FullRange)); + let llptrptr = GEPi(bcx, val, &discrfield[]); let llptrty = val_ty(llptrptr).element_type(); Store(bcx, C_null(llptrty), llptrptr) } @@ -935,7 +935,7 @@ pub fn struct_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, st: &Struct<'tcx>, v let val = if needs_cast { let ccx = bcx.ccx(); let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::>(); - let real_ty = Type::struct_(ccx, fields.index(&FullRange), st.packed); + let real_ty = Type::struct_(ccx, &fields[], st.packed); PointerCast(bcx, val, real_ty.ptr_to()) } else { val @@ -967,14 +967,14 @@ pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, for (discr, case) in cases.iter().enumerate() { let mut variant_cx = fcx.new_temp_block( - format!("enum-variant-iter-{}", discr.to_string()).index(&FullRange) + &format!("enum-variant-iter-{}", &discr.to_string())[] ); let rhs_val = C_integral(ll_inttype(ccx, ity), discr as u64, true); AddCase(llswitch, rhs_val, variant_cx.llbb); let fields = case.fields.iter().map(|&ty| type_of::type_of(bcx.ccx(), ty)).collect::>(); - let real_ty = Type::struct_(ccx, fields.index(&FullRange), case.packed); + let real_ty = Type::struct_(ccx, &fields[], case.packed); let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to()); variant_cx = f(variant_cx, case, variant_value); @@ -1051,14 +1051,14 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true); let mut f = vec![lldiscr]; f.push_all(vals); - let mut contents = build_const_struct(ccx, case, f.index(&FullRange)); + let mut contents = build_const_struct(ccx, case, &f[]); contents.push_all(&[padding(ccx, max_sz - case.size)]); - C_struct(ccx, contents.index(&FullRange), false) + C_struct(ccx, &contents[], false) } Univariant(ref st, _dro) => { assert!(discr == 0); let contents = build_const_struct(ccx, st, vals); - C_struct(ccx, contents.index(&FullRange), st.packed) + C_struct(ccx, &contents[], st.packed) } RawNullablePointer { nndiscr, nnty, .. } => { if discr == nndiscr { @@ -1070,9 +1070,9 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr } StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => { if discr == nndiscr { - C_struct(ccx, build_const_struct(ccx, + C_struct(ccx, &build_const_struct(ccx, nonnull, - vals).index(&FullRange), + vals)[], false) } else { let vals = nonnull.fields.iter().map(|&ty| { @@ -1080,9 +1080,9 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr // field; see #8506. 
C_null(type_of::sizing_type_of(ccx, ty)) }).collect::>(); - C_struct(ccx, build_const_struct(ccx, + C_struct(ccx, &build_const_struct(ccx, nonnull, - vals.index(&FullRange)).index(&FullRange), + &vals[])[], false) } } diff --git a/src/librustc_trans/trans/asm.rs b/src/librustc_trans/trans/asm.rs index 890f046be1b..9b6fa32405f 100644 --- a/src/librustc_trans/trans/asm.rs +++ b/src/librustc_trans/trans/asm.rs @@ -71,7 +71,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) callee::DontAutorefArg) }) }).collect::>(); - inputs.push_all(ext_inputs.index(&FullRange)); + inputs.push_all(&ext_inputs[]); // no failure occurred preparing operands, no need to cleanup fcx.pop_custom_cleanup_scope(temp_scope); @@ -91,18 +91,18 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) if !clobbers.is_empty() { clobbers.push(','); } - clobbers.push_str(more_clobbers.index(&FullRange)); + clobbers.push_str(&more_clobbers[]); } // Add the clobbers to our constraints list if clobbers.len() != 0 && constraints.len() != 0 { constraints.push(','); - constraints.push_str(clobbers.index(&FullRange)); + constraints.push_str(&clobbers[]); } else { - constraints.push_str(clobbers.index(&FullRange)); + constraints.push_str(&clobbers[]); } - debug!("Asm Constraints: {}", constraints.index(&FullRange)); + debug!("Asm Constraints: {}", &constraints[]); let num_outputs = outputs.len(); @@ -112,7 +112,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) } else if num_outputs == 1 { output_types[0] } else { - Type::struct_(bcx.ccx(), output_types.index(&FullRange), false) + Type::struct_(bcx.ccx(), &output_types[], false) }; let dialect = match ia.dialect { diff --git a/src/librustc_trans/trans/base.rs b/src/librustc_trans/trans/base.rs index 057d0f378e6..47a296b99a3 100644 --- a/src/librustc_trans/trans/base.rs +++ b/src/librustc_trans/trans/base.rs @@ -249,7 +249,7 @@ fn get_extern_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<'tcx>, let f = decl_rust_fn(ccx, fn_ty, name); csearch::get_item_attrs(&ccx.sess().cstore, did, |attrs| { - set_llvm_fn_attrs(ccx, attrs.index(&FullRange), f) + set_llvm_fn_attrs(ccx, &attrs[], f) }); ccx.externs().borrow_mut().insert(name.to_string(), f); @@ -372,9 +372,9 @@ fn require_alloc_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, match bcx.tcx().lang_items.require(it) { Ok(id) => id, Err(s) => { - bcx.sess().fatal(format!("allocation of `{}` {}", + bcx.sess().fatal(&format!("allocation of `{}` {}", bcx.ty_to_string(info_ty), - s).index(&FullRange)); + s)[]); } } } @@ -493,7 +493,7 @@ pub fn unset_split_stack(f: ValueRef) { // silently mangles such symbols, breaking our linkage model. 
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: String) { if ccx.all_llvm_symbols().borrow().contains(&sym) { - ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym).index(&FullRange)); + ccx.sess().bug(&format!("duplicate LLVM symbol: {}", sym)[]); } ccx.all_llvm_symbols().borrow_mut().insert(sym); } @@ -530,7 +530,7 @@ pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty::mk_nil(ccx.tcx())); get_extern_fn(ccx, &mut *ccx.externs().borrow_mut(), - name.index(&FullRange), + &name[], llvm::CCallConv, llty, dtor_ty) @@ -778,9 +778,9 @@ fn iter_variant<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>, for variant in (*variants).iter() { let variant_cx = fcx.new_temp_block( - format!("enum-iter-variant-{}", - variant.disr_val.to_string().index(&FullRange)) - .index(&FullRange)); + &format!("enum-iter-variant-{}", + &variant.disr_val.to_string()[]) + []); match adt::trans_case(cx, &*repr, variant.disr_val) { _match::SingleResult(r) => { AddCase(llswitch, r.val, variant_cx.llbb) @@ -804,8 +804,8 @@ fn iter_variant<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>, } } _ => { - cx.sess().unimpl(format!("type in iter_structural_ty: {}", - ty_to_string(cx.tcx(), t)).index(&FullRange)) + cx.sess().unimpl(&format!("type in iter_structural_ty: {}", + ty_to_string(cx.tcx(), t))[]) } } return cx; @@ -886,8 +886,8 @@ pub fn fail_if_zero_or_overflows<'blk, 'tcx>( (ICmp(cx, llvm::IntEQ, rhs, zero), false) } _ => { - cx.sess().bug(format!("fail-if-zero on unexpected type: {}", - ty_to_string(cx.tcx(), rhs_t)).index(&FullRange)); + cx.sess().bug(&format!("fail-if-zero on unexpected type: {}", + ty_to_string(cx.tcx(), rhs_t))[]); } }; let bcx = with_cond(cx, is_zero, |bcx| { @@ -941,14 +941,14 @@ pub fn trans_external_path<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty::ty_bare_fn(_, ref fn_ty) => { match ccx.sess().target.target.adjust_abi(fn_ty.abi) { Rust | RustCall => { - get_extern_rust_fn(ccx, t, name.index(&FullRange), did) + get_extern_rust_fn(ccx, t, &name[], did) } RustIntrinsic => { ccx.sess().bug("unexpected intrinsic in trans_external_path") } _ => { foreign::register_foreign_item_fn(ccx, fn_ty.abi, t, - name.index(&FullRange)) + &name[]) } } } @@ -995,7 +995,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llresult = Invoke(bcx, llfn, - llargs.index(&FullRange), + &llargs[], normal_bcx.llbb, landing_pad, Some(attributes)); @@ -1011,7 +1011,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => debuginfo::clear_source_location(bcx.fcx) }; - let llresult = Call(bcx, llfn, llargs.index(&FullRange), Some(attributes)); + let llresult = Call(bcx, llfn, &llargs[], Some(attributes)); return (llresult, bcx); } } @@ -1128,7 +1128,7 @@ pub fn call_lifetime_end(cx: Block, ptr: ValueRef) { pub fn call_memcpy(cx: Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, align: u32) { let _icx = push_ctxt("call_memcpy"); let ccx = cx.ccx(); - let key = match ccx.sess().target.target.target_word_size.index(&FullRange) { + let key = match &ccx.sess().target.target.target_word_size[] { "32" => "llvm.memcpy.p0i8.p0i8.i32", "64" => "llvm.memcpy.p0i8.p0i8.i64", tws => panic!("Unsupported target word size for memcpy: {}", tws), @@ -1175,7 +1175,7 @@ fn memzero<'a, 'tcx>(b: &Builder<'a, 'tcx>, llptr: ValueRef, ty: Ty<'tcx>) { let llty = type_of::type_of(ccx, ty); - let intrinsic_key = match ccx.sess().target.target.target_word_size.index(&FullRange) { + let intrinsic_key = match &ccx.sess().target.target.target_word_size[] { "32" => "llvm.memset.p0i8.i32", "64" => "llvm.memset.p0i8.i64", tws => 
panic!("Unsupported target word size for memset: {}", tws), @@ -1663,7 +1663,7 @@ fn copy_unboxed_closure_args_to_allocas<'blk, 'tcx>( "argtuple", arg_scope_id)); let untupled_arg_types = match monomorphized_arg_types[0].sty { - ty::ty_tup(ref types) => types.index(&FullRange), + ty::ty_tup(ref types) => &types[], _ => { bcx.tcx().sess.span_bug(args[0].pat.span, "first arg to `rust-call` ABI function \ @@ -1851,12 +1851,12 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let arg_datums = if abi != RustCall { create_datums_for_fn_args(&fcx, - monomorphized_arg_types.index(&FullRange)) + &monomorphized_arg_types[]) } else { create_datums_for_fn_args_under_call_abi( bcx, arg_scope, - monomorphized_arg_types.index(&FullRange)) + &monomorphized_arg_types[]) }; bcx = match closure_env.kind { @@ -1864,16 +1864,16 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, copy_args_to_allocas(&fcx, arg_scope, bcx, - decl.inputs.index(&FullRange), + &decl.inputs[], arg_datums) } closure::UnboxedClosure(..) => { copy_unboxed_closure_args_to_allocas( bcx, arg_scope, - decl.inputs.index(&FullRange), + &decl.inputs[], arg_datums, - monomorphized_arg_types.index(&FullRange)) + &monomorphized_arg_types[]) } }; @@ -1990,9 +1990,9 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, ty::erase_late_bound_regions(bcx.tcx(), &bft.sig.output()).unwrap() } _ => ccx.sess().bug( - format!("trans_enum_variant_constructor: \ + &format!("trans_enum_variant_constructor: \ unexpected ctor return type {}", - ctor_ty.repr(tcx)).index(&FullRange)) + ctor_ty.repr(tcx))[]) }; // Get location to store the result. If the user does not care about @@ -2015,7 +2015,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, bcx = expr::trans_adt(bcx, result_ty, disr, - fields.index(&FullRange), + &fields[], None, expr::SaveIn(llresult), call_info); @@ -2064,9 +2064,9 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx ty::erase_late_bound_regions(ccx.tcx(), &bft.sig.output()) } _ => ccx.sess().bug( - format!("trans_enum_variant_or_tuple_like_struct: \ + &format!("trans_enum_variant_or_tuple_like_struct: \ unexpected ctor return type {}", - ty_to_string(ccx.tcx(), ctor_ty)).index(&FullRange)) + ty_to_string(ccx.tcx(), ctor_ty))[]) }; let arena = TypedArena::new(); @@ -2080,7 +2080,7 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx ty::erase_late_bound_regions( ccx.tcx(), &ty::ty_fn_args(ctor_ty)); - let arg_datums = create_datums_for_fn_args(&fcx, arg_tys.index(&FullRange)); + let arg_datums = create_datums_for_fn_args(&fcx, &arg_tys[]); if !type_is_zero_size(fcx.ccx, result_ty.unwrap()) { let dest = fcx.get_ret_slot(bcx, result_ty, "eret_slot"); @@ -2164,9 +2164,9 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span, // pass for the latter already ran. 
lint::raw_emit_lint(&ccx.tcx().sess, lint::builtin::VARIANT_SIZE_DIFFERENCES, *lvlsrc.unwrap(), Some(sp), - format!("enum variant is more than three times larger \ + &format!("enum variant is more than three times larger \ ({} bytes) than the next largest (ignoring padding)", - largest).index(&FullRange)); + largest)[]); ccx.sess().span_note(enum_def.variants[largest_index].span, "this variant is the largest"); @@ -2284,7 +2284,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { match item.node { ast::ItemFn(ref decl, _fn_style, abi, ref generics, ref body) => { if !generics.is_type_parameterized() { - let trans_everywhere = attr::requests_inline(item.attrs.index(&FullRange)); + let trans_everywhere = attr::requests_inline(&item.attrs[]); // Ignore `trans_everywhere` for cross-crate inlined items // (`from_external`). `trans_item` will be called once for each // compilation unit that references the item, so it will still get @@ -2295,7 +2295,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { foreign::trans_rust_fn_with_foreign_abi(ccx, &**decl, &**body, - item.attrs.index(&FullRange), + &item.attrs[], llfn, &Substs::trans_empty(), item.id, @@ -2307,7 +2307,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { llfn, &Substs::trans_empty(), item.id, - item.attrs.index(&FullRange)); + &item.attrs[]); } update_linkage(ccx, llfn, @@ -2324,7 +2324,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { ast::ItemImpl(_, _, ref generics, _, _, ref impl_items) => { meth::trans_impl(ccx, item.ident, - impl_items.index(&FullRange), + &impl_items[], generics, item.id); } @@ -2354,7 +2354,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { // Do static_assert checking. It can't really be done much earlier // because we need to get the value of the bool out of LLVM - if attr::contains_name(item.attrs.index(&FullRange), "static_assert") { + if attr::contains_name(&item.attrs[], "static_assert") { if m == ast::MutMutable { ccx.sess().span_fatal(expr.span, "cannot have static_assert on a mutable \ @@ -2431,7 +2431,7 @@ fn register_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, _ => panic!("expected bare rust fn") }; - let llfn = decl_rust_fn(ccx, node_type, sym.index(&FullRange)); + let llfn = decl_rust_fn(ccx, node_type, &sym[]); finish_register_fn(ccx, sp, sym, node_id, llfn); llfn } @@ -2476,7 +2476,7 @@ pub fn get_fn_llvm_attributes<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty< match fn_sig.inputs[1].sty { ty::ty_tup(ref t_in) => { - inputs.push_all(t_in.index(&FullRange)); + inputs.push_all(&t_in[]); inputs } _ => ccx.sess().bug("expected tuple'd inputs") @@ -2612,7 +2612,7 @@ pub fn register_fn_llvmty(ccx: &CrateContext, debug!("register_fn_llvmty id={} sym={}", node_id, sym); let llfn = decl_fn(ccx, - sym.index(&FullRange), + &sym[], cc, llfty, ty::FnConverging(ty::mk_nil(ccx.tcx()))); @@ -2668,7 +2668,7 @@ fn create_entry_fn(ccx: &CrateContext, let (start_fn, args) = if use_start_lang_item { let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) { Ok(id) => id, - Err(s) => { ccx.sess().fatal(s.index(&FullRange)); } + Err(s) => { ccx.sess().fatal(&s[]); } }; let start_fn = if start_def_id.krate == ast::LOCAL_CRATE { get_item_val(ccx, start_def_id.node) @@ -2760,7 +2760,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let val = match item { ast_map::NodeItem(i) => { let ty = ty::node_id_to_type(ccx.tcx(), i.id); - let sym = |&:| exported_name(ccx, id, ty, i.attrs.index(&FullRange)); + let sym = |&:| 
exported_name(ccx, id, ty, &i.attrs[]); let v = match i.node { ast::ItemStatic(_, _, ref expr) => { @@ -2783,16 +2783,16 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { } else { llvm::LLVMTypeOf(v) }; - if contains_null(sym.index(&FullRange)) { + if contains_null(&sym[]) { ccx.sess().fatal( - format!("Illegal null byte in export_name \ - value: `{}`", sym).index(&FullRange)); + &format!("Illegal null byte in export_name \ + value: `{}`", sym)[]); } let buf = CString::from_slice(sym.as_bytes()); let g = llvm::LLVMAddGlobal(ccx.llmod(), llty, buf.as_ptr()); - if attr::contains_name(i.attrs.index(&FullRange), + if attr::contains_name(&i.attrs[], "thread_local") { llvm::set_thread_local(g, true); } @@ -2817,19 +2817,19 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { sym, i.id) }; - set_llvm_fn_attrs(ccx, i.attrs.index(&FullRange), llfn); + set_llvm_fn_attrs(ccx, &i.attrs[], llfn); llfn } _ => panic!("get_item_val: weird result in table") }; - match attr::first_attr_value_str_by_name(i.attrs.index(&FullRange), + match attr::first_attr_value_str_by_name(&i.attrs[], "link_section") { Some(sect) => { if contains_null(sect.get()) { - ccx.sess().fatal(format!("Illegal null byte in link_section value: `{}`", - sect.get()).index(&FullRange)); + ccx.sess().fatal(&format!("Illegal null byte in link_section value: `{}`", + sect.get())[]); } unsafe { let buf = CString::from_slice(sect.get().as_bytes()); @@ -2872,7 +2872,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let abi = ccx.tcx().map.get_foreign_abi(id); let ty = ty::node_id_to_type(ccx.tcx(), ni.id); let name = foreign::link_name(&*ni); - foreign::register_foreign_item_fn(ccx, abi, ty, name.get().index(&FullRange)) + foreign::register_foreign_item_fn(ccx, abi, ty, &name.get()[]) } ast::ForeignItemStatic(..) 
=> { foreign::register_static(ccx, &*ni) @@ -2895,7 +2895,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let sym = exported_name(ccx, id, ty, - enm.attrs.index(&FullRange)); + &enm.attrs[]); llfn = match enm.node { ast::ItemEnum(_, _) => { @@ -2922,8 +2922,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let sym = exported_name(ccx, id, ty, - struct_item.attrs - .index(&FullRange)); + &struct_item.attrs[]); let llfn = register_fn(ccx, struct_item.span, sym, ctor_id, ty); set_inline_hint(llfn); @@ -2931,8 +2930,8 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { } ref variant => { - ccx.sess().bug(format!("get_item_val(): unexpected variant: {:?}", - variant).index(&FullRange)) + ccx.sess().bug(&format!("get_item_val(): unexpected variant: {:?}", + variant)[]) } }; @@ -2953,10 +2952,10 @@ fn register_method(ccx: &CrateContext, id: ast::NodeId, m: &ast::Method) -> ValueRef { let mty = ty::node_id_to_type(ccx.tcx(), id); - let sym = exported_name(ccx, id, mty, m.attrs.index(&FullRange)); + let sym = exported_name(ccx, id, mty, &m.attrs[]); let llfn = register_fn(ccx, m.span, sym, id, mty); - set_llvm_fn_attrs(ccx, m.attrs.index(&FullRange), llfn); + set_llvm_fn_attrs(ccx, &m.attrs[], llfn); llfn } @@ -2995,7 +2994,7 @@ pub fn write_metadata(cx: &SharedCrateContext, krate: &ast::Crate) -> Vec { Some(compressed) => compressed, None => cx.sess().fatal("failed to compress metadata"), }.as_slice()); - let llmeta = C_bytes_in_context(cx.metadata_llcx(), compressed.index(&FullRange)); + let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[]); let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false); let name = format!("rust_metadata_{}_{}", cx.link_meta().crate_name, @@ -3124,7 +3123,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) let link_meta = link::build_link_meta(&tcx.sess, krate, name); let codegen_units = tcx.sess.opts.cg.codegen_units; - let shared_ccx = SharedCrateContext::new(link_meta.crate_name.index(&FullRange), + let shared_ccx = SharedCrateContext::new(&link_meta.crate_name[], codegen_units, tcx, export_map, @@ -3226,7 +3225,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) llmod: shared_ccx.metadata_llmod(), }; let formats = shared_ccx.tcx().dependency_formats.borrow().clone(); - let no_builtins = attr::contains_name(krate.attrs.index(&FullRange), "no_builtins"); + let no_builtins = attr::contains_name(&krate.attrs[], "no_builtins"); let translation = CrateTranslation { modules: modules, diff --git a/src/librustc_trans/trans/builder.rs b/src/librustc_trans/trans/builder.rs index d0eaf799af1..b80088e4690 100644 --- a/src/librustc_trans/trans/builder.rs +++ b/src/librustc_trans/trans/builder.rs @@ -552,11 +552,11 @@ pub fn gepi(&self, base: ValueRef, ixs: &[uint]) -> ValueRef { for (small_vec_e, &ix) in small_vec.iter_mut().zip(ixs.iter()) { *small_vec_e = C_i32(self.ccx, ix as i32); } - self.inbounds_gep(base, small_vec.index(&(0..ixs.len()))) + self.inbounds_gep(base, &small_vec[0..ixs.len()]) } else { let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::>(); self.count_insn("gepi"); - self.inbounds_gep(base, v.index(&FullRange)) + self.inbounds_gep(base, &v[]) } } @@ -764,8 +764,8 @@ pub fn add_span_comment(&self, sp: Span, text: &str) { let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_string(sp)); - debug!("{}", s.index(&FullRange)); - self.add_comment(s.index(&FullRange)); + debug!("{}", &s[]); + 
self.add_comment(&s[]); } } @@ -802,7 +802,7 @@ pub fn inline_asm_call(&self, asm: *const c_char, cons: *const c_char, }).collect::>(); debug!("Asm Output Type: {}", self.ccx.tn().type_to_string(output)); - let fty = Type::func(argtys.index(&FullRange), &output); + let fty = Type::func(&argtys[], &output); unsafe { let v = llvm::LLVMInlineAsm( fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint); diff --git a/src/librustc_trans/trans/cabi.rs b/src/librustc_trans/trans/cabi.rs index a901142467b..8a2a2534cab 100644 --- a/src/librustc_trans/trans/cabi.rs +++ b/src/librustc_trans/trans/cabi.rs @@ -108,7 +108,7 @@ pub fn compute_abi_info(ccx: &CrateContext, atys: &[Type], rty: Type, ret_def: bool) -> FnType { - match ccx.sess().target.target.arch.index(&FullRange) { + match &ccx.sess().target.target.arch[] { "x86" => cabi_x86::compute_abi_info(ccx, atys, rty, ret_def), "x86_64" => if ccx.sess().target.target.options.is_like_windows { cabi_x86_win64::compute_abi_info(ccx, atys, rty, ret_def) @@ -118,7 +118,7 @@ pub fn compute_abi_info(ccx: &CrateContext, "arm" => cabi_arm::compute_abi_info(ccx, atys, rty, ret_def), "aarch64" => cabi_aarch64::compute_abi_info(ccx, atys, rty, ret_def), "mips" => cabi_mips::compute_abi_info(ccx, atys, rty, ret_def), - a => ccx.sess().fatal((format!("unrecognized arch \"{}\" in target specification", a)) - .index(&FullRange)), + a => ccx.sess().fatal(&format!("unrecognized arch \"{}\" in target specification", a) + []), } } diff --git a/src/librustc_trans/trans/cabi_x86_64.rs b/src/librustc_trans/trans/cabi_x86_64.rs index f40072d1cba..86190b1e566 100644 --- a/src/librustc_trans/trans/cabi_x86_64.rs +++ b/src/librustc_trans/trans/cabi_x86_64.rs @@ -318,7 +318,7 @@ fn llvec_len(cls: &[RegClass]) -> uint { tys.push(Type::i64(ccx)); } SSEFv => { - let vec_len = llvec_len(cls.index(&((i + 1u)..))); + let vec_len = llvec_len(&cls[(i + 1u)..]); let vec_ty = Type::vector(&Type::f32(ccx), (vec_len * 2u) as u64); tys.push(vec_ty); i += vec_len; diff --git a/src/librustc_trans/trans/callee.rs b/src/librustc_trans/trans/callee.rs index b7b486f1d0a..274c3e0a50a 100644 --- a/src/librustc_trans/trans/callee.rs +++ b/src/librustc_trans/trans/callee.rs @@ -112,9 +112,9 @@ fn datum_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) _ => { bcx.tcx().sess.span_bug( expr.span, - format!("type of callee is neither bare-fn nor closure: \ + &format!("type of callee is neither bare-fn nor closure: \ {}", - bcx.ty_to_string(datum.ty)).index(&FullRange)); + bcx.ty_to_string(datum.ty))[]); } } } @@ -206,8 +206,8 @@ fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, def::DefSelfTy(..) | def::DefAssociatedPath(..) 
=> { bcx.tcx().sess.span_bug( ref_expr.span, - format!("cannot translate def {:?} \ - to a callable thing!", def).index(&FullRange)); + &format!("cannot translate def {:?} \ + to a callable thing!", def)[]); } } } @@ -289,8 +289,8 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( } _ => { - tcx.sess.bug(format!("trans_fn_pointer_shim invoked on invalid type: {}", - bare_fn_ty.repr(tcx)).index(&FullRange)); + tcx.sess.bug(&format!("trans_fn_pointer_shim invoked on invalid type: {}", + bare_fn_ty.repr(tcx))[]); } }; let sig = ty::erase_late_bound_regions(tcx, sig); @@ -315,7 +315,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( let llfn = decl_internal_rust_fn(ccx, tuple_fn_ty, - function_name.index(&FullRange)); + &function_name[]); // let block_arena = TypedArena::new(); @@ -350,7 +350,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( None, bare_fn_ty, |bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) }, - ArgVals(llargs.index(&FullRange)), + ArgVals(&llargs[]), dest).bcx; finish_fn(&fcx, bcx, sig.output); @@ -776,7 +776,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, // Invoke the actual rust fn and update bcx/llresult. let (llret, b) = base::invoke(bcx, llfn, - llargs.index(&FullRange), + &llargs[], callee_ty, call_info); bcx = b; @@ -815,7 +815,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, bcx = foreign::trans_native_call(bcx, callee_ty, llfn, opt_llretslot.unwrap(), - llargs.index(&FullRange), arg_tys); + &llargs[], arg_tys); } fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope); diff --git a/src/librustc_trans/trans/cleanup.rs b/src/librustc_trans/trans/cleanup.rs index 92a96cd02b5..5658889aaf3 100644 --- a/src/librustc_trans/trans/cleanup.rs +++ b/src/librustc_trans/trans/cleanup.rs @@ -403,8 +403,8 @@ fn schedule_clean_in_ast_scope(&self, } self.ccx.sess().bug( - format!("no cleanup scope {} found", - self.ccx.tcx().map.node_to_string(cleanup_scope)).index(&FullRange)); + &format!("no cleanup scope {} found", + self.ccx.tcx().map.node_to_string(cleanup_scope))[]); } /// Schedules a cleanup to occur in the top-most scope, which must be a temporary scope. 
@@ -584,9 +584,9 @@ fn trans_cleanups_to_exit_scope(&'blk self, } LoopExit(id, _) => { - self.ccx.sess().bug(format!( + self.ccx.sess().bug(&format!( "cannot exit from scope {}, \ - not in scope", id).index(&FullRange)); + not in scope", id)[]); } } } @@ -655,7 +655,7 @@ fn trans_cleanups_to_exit_scope(&'blk self, let name = scope.block_name("clean"); debug!("generating cleanups for {}", name); let bcx_in = self.new_block(label.is_unwind(), - name.index(&FullRange), + &name[], None); let mut bcx_out = bcx_in; for cleanup in scope.cleanups.iter().rev() { @@ -702,7 +702,7 @@ fn get_or_create_landing_pad(&'blk self) -> BasicBlockRef { Some(llbb) => { return llbb; } None => { let name = last_scope.block_name("unwind"); - pad_bcx = self.new_block(true, name.index(&FullRange), None); + pad_bcx = self.new_block(true, &name[], None); last_scope.cached_landing_pad = Some(pad_bcx.llbb); } } @@ -1022,8 +1022,8 @@ pub fn temporary_scope(tcx: &ty::ctxt, r } None => { - tcx.sess.bug(format!("no temporary scope available for expr {}", - id).index(&FullRange)) + tcx.sess.bug(&format!("no temporary scope available for expr {}", + id)[]) } } } diff --git a/src/librustc_trans/trans/closure.rs b/src/librustc_trans/trans/closure.rs index ad2ed67b22c..76f82c14e0a 100644 --- a/src/librustc_trans/trans/closure.rs +++ b/src/librustc_trans/trans/closure.rs @@ -154,7 +154,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let tcx = ccx.tcx(); // compute the type of the closure - let cdata_ty = mk_closure_tys(tcx, bound_values.index(&FullRange)); + let cdata_ty = mk_closure_tys(tcx, &bound_values[]); // cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a // tuple. This could be a ptr in uniq or a box or on stack, @@ -182,8 +182,8 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, debug!("Copy {} into closure", bv.to_string(ccx)); if ccx.sess().asm_comments() { - add_comment(bcx, format!("Copy {} into closure", - bv.to_string(ccx)).index(&FullRange)); + add_comment(bcx, &format!("Copy {} into closure", + bv.to_string(ccx))[]); } let bound_data = GEPi(bcx, llbox, &[0u, abi::BOX_FIELD_BODY, i]); @@ -420,7 +420,7 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let s = tcx.map.with_path(id, |path| { mangle_internal_name_by_path_and_seq(path, "closure") }); - let llfn = decl_internal_rust_fn(ccx, fty, s.index(&FullRange)); + let llfn = decl_internal_rust_fn(ccx, fty, &s[]); // set an inline hint for all closures set_inline_hint(llfn); @@ -444,7 +444,7 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, &[], ty::erase_late_bound_regions(ccx.tcx(), &ty::ty_fn_ret(fty)), ty::ty_fn_abi(fty), - ClosureEnv::new(freevars.index(&FullRange), + ClosureEnv::new(&freevars[], BoxedClosure(cdata_ty, store))); fill_fn_pair(bcx, dest_addr, llfn, llbox); bcx @@ -489,7 +489,7 @@ pub fn get_or_create_declaration_if_unboxed_closure<'a, 'tcx>(ccx: &CrateContext mangle_internal_name_by_path_and_seq(path, "unboxed_closure") }); - let llfn = decl_internal_rust_fn(ccx, function_type, symbol.index(&FullRange)); + let llfn = decl_internal_rust_fn(ccx, function_type, &symbol[]); // set an inline hint for all closures set_inline_hint(llfn); @@ -544,7 +544,7 @@ pub fn trans_unboxed_closure<'blk, 'tcx>( &[], sig.output, function_type.abi, - ClosureEnv::new(freevars.index(&FullRange), + ClosureEnv::new(&freevars[], UnboxedClosure(freevar_mode))); // Don't hoist this to the top of the function. 
It's perfectly legitimate diff --git a/src/librustc_trans/trans/common.rs b/src/librustc_trans/trans/common.rs index 237fc185636..9b65259ad51 100644 --- a/src/librustc_trans/trans/common.rs +++ b/src/librustc_trans/trans/common.rs @@ -273,7 +273,7 @@ pub fn gensym_name(name: &str) -> PathElem { let num = token::gensym(name).uint(); // use one colon which will get translated to a period by the mangler, and // we're guaranteed that `num` is globally unique for this crate. -PathName(token::gensym(format!("{}:{}", name, num).index(&FullRange))) +PathName(token::gensym(&format!("{}:{}", name, num)[])) } #[derive(Copy)] @@ -600,8 +600,8 @@ pub fn def(&self, nid: ast::NodeId) -> def::Def { match self.tcx().def_map.borrow().get(&nid) { Some(v) => v.clone(), None => { - self.tcx().sess.bug(format!( - "no def associated with node id {}", nid).index(&FullRange)); + self.tcx().sess.bug(&format!( + "no def associated with node id {}", nid)[]); } } } @@ -1029,9 +1029,9 @@ pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, Err(e) => { tcx.sess.span_bug( span, - format!("Encountered error `{}` selecting `{}` during trans", + &format!("Encountered error `{}` selecting `{}` during trans", e.repr(tcx), - trait_ref.repr(tcx)).index(&FullRange)) + trait_ref.repr(tcx))[]) } }; @@ -1123,8 +1123,8 @@ pub fn drain_fulfillment_cx<'a,'tcx,T>(span: Span, } else { infcx.tcx.sess.span_bug( span, - format!("Encountered errors `{}` fulfilling during trans", - errors.repr(infcx.tcx)).index(&FullRange)); + &format!("Encountered errors `{}` fulfilling during trans", + errors.repr(infcx.tcx))[]); } } } @@ -1163,8 +1163,8 @@ pub fn node_id_substs<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, }; if substs.types.any(|t| ty::type_needs_infer(*t)) { - tcx.sess.bug(format!("type parameters for node {:?} include inference types: {:?}", - node, substs.repr(tcx)).index(&FullRange)); + tcx.sess.bug(&format!("type parameters for node {:?} include inference types: {:?}", + node, substs.repr(tcx))[]); } monomorphize::apply_param_substs(tcx, @@ -1182,8 +1182,8 @@ pub fn langcall(bcx: Block, Err(s) => { let msg = format!("{} {}", msg, s); match span { - Some(span) => bcx.tcx().sess.span_fatal(span, msg.index(&FullRange)), - None => bcx.tcx().sess.fatal(msg.index(&FullRange)), + Some(span) => bcx.tcx().sess.span_fatal(span, &msg[]), + None => bcx.tcx().sess.fatal(&msg[]), } } } diff --git a/src/librustc_trans/trans/consts.rs b/src/librustc_trans/trans/consts.rs index b0474d7e011..00b97286de3 100644 --- a/src/librustc_trans/trans/consts.rs +++ b/src/librustc_trans/trans/consts.rs @@ -52,9 +52,9 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) C_integral(Type::uint_from_ty(cx, t), i as u64, false) } _ => cx.sess().span_bug(lit.span, - format!("integer literal has type {} (expected int \ + &format!("integer literal has type {} (expected int \ or uint)", - ty_to_string(cx.tcx(), lit_int_ty)).index(&FullRange)) + ty_to_string(cx.tcx(), lit_int_ty))[]) } } ast::LitFloat(ref fs, t) => { @@ -74,7 +74,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) } ast::LitBool(b) => C_bool(cx, b), ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()), - ast::LitBinary(ref data) => C_binary_slice(cx, data.index(&FullRange)), + ast::LitBinary(ref data) => C_binary_slice(cx, &data[]), } } @@ -93,9 +93,9 @@ fn const_vec(cx: &CrateContext, e: &ast::Expr, .collect::>(); // If the vector contains enums, an LLVM array won't work. 
let v = if vs.iter().any(|vi| val_ty(*vi) != llunitty) { - C_struct(cx, vs.index(&FullRange), false) + C_struct(cx, &vs[], false) } else { - C_array(llunitty, vs.index(&FullRange)) + C_array(llunitty, &vs[]) }; (v, llunitty) } @@ -148,14 +148,14 @@ fn const_deref<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, v: ValueRef, (const_deref_newtype(cx, v, t), mt.ty) } _ => { - cx.sess().bug(format!("unexpected dereferenceable type {}", - ty_to_string(cx.tcx(), t)).index(&FullRange)) + cx.sess().bug(&format!("unexpected dereferenceable type {}", + ty_to_string(cx.tcx(), t))[]) } } } None => { - cx.sess().bug(format!("cannot dereference const of type {}", - ty_to_string(cx.tcx(), t)).index(&FullRange)) + cx.sess().bug(&format!("cannot dereference const of type {}", + ty_to_string(cx.tcx(), t))[]) } } } @@ -251,16 +251,16 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) ], false); } _ => cx.sess().span_bug(e.span, - format!("unimplemented type in const unsize: {}", - ty_to_string(cx.tcx(), ty)).index(&FullRange)) + &format!("unimplemented type in const unsize: {}", + ty_to_string(cx.tcx(), ty))[]) } } _ => { cx.sess() .span_bug(e.span, - format!("unimplemented const \ + &format!("unimplemented const \ autoref {:?}", - autoref).index(&FullRange)) + autoref)[]) } } } @@ -279,9 +279,9 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) llvm::LLVMDumpValue(llconst); llvm::LLVMDumpValue(C_undef(llty)); } - cx.sess().bug(format!("const {} of type {} has size {} instead of {}", + cx.sess().bug(&format!("const {} of type {} has size {} instead of {}", e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety), - csize, tsize).index(&FullRange)); + csize, tsize)[]); } (llconst, ety_adjusted) } @@ -429,23 +429,23 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { (const_deref_ptr(cx, e1), const_get_elt(cx, bv, &[1])) }, _ => cx.sess().span_bug(base.span, - format!("index-expr base must be a vector \ + &format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt)).index(&FullRange)) + ty_to_string(cx.tcx(), bt))[]) }, ty::ty_rptr(_, mt) => match mt.ty.sty { ty::ty_vec(_, Some(u)) => { (const_deref_ptr(cx, bv), C_uint(cx, u)) }, _ => cx.sess().span_bug(base.span, - format!("index-expr base must be a vector \ + &format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt)).index(&FullRange)) + ty_to_string(cx.tcx(), bt))[]) }, _ => cx.sess().span_bug(base.span, - format!("index-expr base must be a vector \ + &format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt)).index(&FullRange)) + ty_to_string(cx.tcx(), bt))[]) }; let len = llvm::LLVMConstIntGetZExtValue(len) as u64; @@ -546,8 +546,8 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { ast::ExprTup(ref es) => { let ety = ty::expr_ty(cx.tcx(), e); let repr = adt::represent_type(cx, ety); - let vals = map_list(es.index(&FullRange)); - adt::trans_const(cx, &*repr, 0, vals.index(&FullRange)) + let vals = map_list(&es[]); + adt::trans_const(cx, &*repr, 0, &vals[]) } ast::ExprStruct(_, ref fs, ref base_opt) => { let ety = ty::expr_ty(cx.tcx(), e); @@ -578,7 +578,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { } } }).collect::>(); - adt::trans_const(cx, &*repr, discr, cs.index(&FullRange)) + adt::trans_const(cx, &*repr, discr, &cs[]) }) } ast::ExprVec(ref es) => { @@ -595,9 +595,9 @@ fn const_expr_unadjusted(cx: &CrateContext, e: 
&ast::Expr) -> ValueRef { }; let vs: Vec<_> = repeat(const_expr(cx, &**elem).0).take(n).collect(); if vs.iter().any(|vi| val_ty(*vi) != llunitty) { - C_struct(cx, vs.index(&FullRange), false) + C_struct(cx, &vs[], false) } else { - C_array(llunitty, vs.index(&FullRange)) + C_array(llunitty, &vs[]) } } ast::ExprPath(_) => { @@ -645,8 +645,8 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { Some(def::DefStruct(_)) => { let ety = ty::expr_ty(cx.tcx(), e); let repr = adt::represent_type(cx, ety); - let arg_vals = map_list(args.index(&FullRange)); - adt::trans_const(cx, &*repr, 0, arg_vals.index(&FullRange)) + let arg_vals = map_list(&args[]); + adt::trans_const(cx, &*repr, 0, &arg_vals[]) } Some(def::DefVariant(enum_did, variant_did, _)) => { let ety = ty::expr_ty(cx.tcx(), e); @@ -654,11 +654,11 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { let vinfo = ty::enum_variant_with_id(cx.tcx(), enum_did, variant_did); - let arg_vals = map_list(args.index(&FullRange)); + let arg_vals = map_list(&args[]); adt::trans_const(cx, &*repr, vinfo.disr_val, - arg_vals.index(&FullRange)) + &arg_vals[]) } _ => cx.sess().span_bug(e.span, "expected a struct or variant def") } diff --git a/src/librustc_trans/trans/context.rs b/src/librustc_trans/trans/context.rs index 35fb34eafb4..68773656056 100644 --- a/src/librustc_trans/trans/context.rs +++ b/src/librustc_trans/trans/context.rs @@ -284,7 +284,7 @@ pub fn new(crate_name: &str, // such as a function name in the module. // 1. http://llvm.org/bugs/show_bug.cgi?id=11479 let llmod_id = format!("{}.{}.rs", crate_name, i); - let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id.index(&FullRange)); + let local_ccx = LocalCrateContext::new(&shared_ccx, &llmod_id[]); shared_ccx.local_ccxs.push(local_ccx); } @@ -369,12 +369,12 @@ fn new(shared: &SharedCrateContext<'tcx>, unsafe { let (llcx, llmod) = create_context_and_module(&shared.tcx.sess, name); - let td = mk_target_data(shared.tcx + let td = mk_target_data(&shared.tcx .sess .target .target .data_layout - .index(&FullRange)); + []); let dbg_cx = if shared.tcx.sess.opts.debuginfo != NoDebugInfo { Some(debuginfo::CrateDebugContext::new(llmod)) @@ -721,7 +721,7 @@ pub fn trait_cache(&self) -> &RefCell, /// currently conservatively bounded to 1 << 47 as that is enough to cover the current usable /// address space on 64-bit ARMv8 and x86_64. pub fn obj_size_bound(&self) -> u64 { - match self.sess().target.target.target_word_size.index(&FullRange) { + match &self.sess().target.target.target_word_size[] { "32" => 1 << 31, "64" => 1 << 47, _ => unreachable!() // error handled by config::build_target_config @@ -730,8 +730,8 @@ pub fn obj_size_bound(&self) -> u64 { pub fn report_overbig_object(&self, obj: Ty<'tcx>) -> ! 
{ self.sess().fatal( - format!("the type `{}` is too big for the current architecture", - obj.repr(self.tcx())).index(&FullRange)) + &format!("the type `{}` is too big for the current architecture", + obj.repr(self.tcx()))[]) } } diff --git a/src/librustc_trans/trans/controlflow.rs b/src/librustc_trans/trans/controlflow.rs index 38d40a8322f..adf302501cd 100644 --- a/src/librustc_trans/trans/controlflow.rs +++ b/src/librustc_trans/trans/controlflow.rs @@ -48,7 +48,7 @@ pub fn trans_stmt<'blk, 'tcx>(cx: Block<'blk, 'tcx>, debug!("trans_stmt({})", s.repr(cx.tcx())); if cx.sess().asm_comments() { - add_span_comment(cx, s.span, s.repr(cx.tcx()).index(&FullRange)); + add_span_comment(cx, s.span, &s.repr(cx.tcx())[]); } let mut bcx = cx; @@ -188,7 +188,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } let name = format!("then-block-{}-", thn.id); - let then_bcx_in = bcx.fcx.new_id_block(name.index(&FullRange), thn.id); + let then_bcx_in = bcx.fcx.new_id_block(&name[], thn.id); let then_bcx_out = trans_block(then_bcx_in, &*thn, dest); trans::debuginfo::clear_source_location(bcx.fcx); @@ -439,8 +439,8 @@ pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, match bcx.tcx().def_map.borrow().get(&expr_id) { Some(&def::DefLabel(loop_id)) => loop_id, ref r => { - bcx.tcx().sess.bug(format!("{:?} in def-map for label", - r).index(&FullRange)) + bcx.tcx().sess.bug(&format!("{:?} in def-map for label", + r)[]) } } } @@ -504,7 +504,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let v_str = C_str_slice(ccx, fail_str); let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); - let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange)); + let filename = token::intern_and_get_ident(&loc.file.name[]); let filename = C_str_slice(ccx, filename); let line = C_uint(ccx, loc.line); let expr_file_line_const = C_struct(ccx, &[v_str, filename, line], false); @@ -513,7 +513,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let did = langcall(bcx, Some(sp), "", PanicFnLangItem); let bcx = callee::trans_lang_call(bcx, did, - args.index(&FullRange), + &args[], Some(expr::Ignore)).bcx; Unreachable(bcx); return bcx; @@ -529,7 +529,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Extract the file/line from the span let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); - let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange)); + let filename = token::intern_and_get_ident(&loc.file.name[]); // Invoke the lang item let filename = C_str_slice(ccx, filename); @@ -540,7 +540,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let did = langcall(bcx, Some(sp), "", PanicBoundsCheckFnLangItem); let bcx = callee::trans_lang_call(bcx, did, - args.index(&FullRange), + &args[], Some(expr::Ignore)).bcx; Unreachable(bcx); return bcx; diff --git a/src/librustc_trans/trans/datum.rs b/src/librustc_trans/trans/datum.rs index 26518d4092f..8b52732f4ee 100644 --- a/src/librustc_trans/trans/datum.rs +++ b/src/librustc_trans/trans/datum.rs @@ -463,8 +463,8 @@ pub fn get_element<'blk, F>(&self, bcx: Block<'blk, 'tcx>, ty: Ty<'tcx>, gep(base) } _ => bcx.tcx().sess.bug( - format!("Unexpected unsized type in get_element: {}", - bcx.ty_to_string(self.ty)).index(&FullRange)) + &format!("Unexpected unsized type in get_element: {}", + bcx.ty_to_string(self.ty))[]) }; Datum { val: val, diff --git a/src/librustc_trans/trans/debuginfo.rs b/src/librustc_trans/trans/debuginfo.rs index e2e1b3a799b..3a6f4b47e4e 100644 --- 
a/src/librustc_trans/trans/debuginfo.rs +++ b/src/librustc_trans/trans/debuginfo.rs @@ -284,8 +284,8 @@ fn register_type_with_metadata<'a>(&mut self, type_: Ty<'tcx>, metadata: DIType) { if self.type_to_metadata.insert(type_, metadata).is_some() { - cx.sess().bug(format!("Type metadata for Ty '{}' is already in the TypeMap!", - ppaux::ty_to_string(cx.tcx(), type_)).index(&FullRange)); + cx.sess().bug(&format!("Type metadata for Ty '{}' is already in the TypeMap!", + ppaux::ty_to_string(cx.tcx(), type_))[]); } } @@ -297,8 +297,8 @@ fn register_unique_id_with_metadata(&mut self, metadata: DIType) { if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() { let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id); - cx.sess().bug(format!("Type metadata for unique id '{}' is already in the TypeMap!", - unique_type_id_str.index(&FullRange)).index(&FullRange)); + cx.sess().bug(&format!("Type metadata for unique id '{}' is already in the TypeMap!", + &unique_type_id_str[])[]); } } @@ -335,13 +335,13 @@ fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>, // unique ptr (~) -> {~ :pointee-uid:} // @-ptr (@) -> {@ :pointee-uid:} // sized vec ([T; x]) -> {[:size:] :element-uid:} - // unsized vec ([T]) -> {.index(&FullRange) :element-uid:} + // unsized vec ([T]) -> {[] :element-uid:} // trait (T) -> {trait_:svh: / :node-id:_<(:param-uid:),*> } // closure -> { :store-sigil: |(:param-uid:),* <,_...>| -> \ // :return-type-uid: : (:bounds:)*} // function -> { fn( (:param-uid:)* <,_...> ) -> \ // :return-type-uid:} - // unique vec box (~.index(&FullRange)) -> {HEAP_VEC_BOX<:pointee-uid:>} + // unique vec box (~[]) -> {HEAP_VEC_BOX<:pointee-uid:>} // gc box -> {GC_BOX<:pointee-uid:>} match self.type_to_unique_id.get(&type_).cloned() { @@ -379,14 +379,14 @@ fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>, self.get_unique_type_id_of_type(cx, component_type); let component_type_id = self.get_unique_type_id_as_string(component_type_id); - unique_type_id.push_str(component_type_id.index(&FullRange)); + unique_type_id.push_str(&component_type_id[]); } }, ty::ty_uniq(inner_type) => { unique_type_id.push('~'); let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.index(&FullRange)); + unique_type_id.push_str(&inner_type_id[]); }, ty::ty_ptr(ty::mt { ty: inner_type, mutbl } ) => { unique_type_id.push('*'); @@ -396,7 +396,7 @@ fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>, let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.index(&FullRange)); + unique_type_id.push_str(&inner_type_id[]); }, ty::ty_rptr(_, ty::mt { ty: inner_type, mutbl }) => { unique_type_id.push('&'); @@ -406,12 +406,12 @@ fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>, let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.index(&FullRange)); + unique_type_id.push_str(&inner_type_id[]); }, ty::ty_vec(inner_type, optional_length) => { match optional_length { Some(len) => { - unique_type_id.push_str(format!("[{}]", len).index(&FullRange)); + unique_type_id.push_str(&format!("[{}]", len)[]); } None => { unique_type_id.push_str("[]"); @@ -420,7 +420,7 
@@ fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>, let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.index(&FullRange)); + unique_type_id.push_str(&inner_type_id[]); }, ty::ty_trait(ref trait_data) => { unique_type_id.push_str("trait "); @@ -451,7 +451,7 @@ fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>, self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(parameter_type_id.index(&FullRange)); + unique_type_id.push_str(¶meter_type_id[]); unique_type_id.push(','); } @@ -464,7 +464,7 @@ fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>, ty::FnConverging(ret_ty) => { let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(return_type_id.index(&FullRange)); + unique_type_id.push_str(&return_type_id[]); } ty::FnDiverging => { unique_type_id.push_str("!"); @@ -479,9 +479,9 @@ fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>, &mut unique_type_id); }, _ => { - cx.sess().bug(format!("get_unique_type_id_of_type() - unexpected type: {}, {:?}", - ppaux::ty_to_string(cx.tcx(), type_).index(&FullRange), - type_.sty).index(&FullRange)) + cx.sess().bug(&format!("get_unique_type_id_of_type() - unexpected type: {}, {:?}", + &ppaux::ty_to_string(cx.tcx(), type_)[], + type_.sty)[]) } }; @@ -524,7 +524,7 @@ fn from_def_id_and_substs<'a, 'tcx>(type_map: &mut TypeMap<'tcx>, output.push_str(crate_hash.as_str()); output.push_str("/"); - output.push_str(format!("{:x}", def_id.node).index(&FullRange)); + output.push_str(&format!("{:x}", def_id.node)[]); // Maybe check that there is no self type here. 
@@ -537,7 +537,7 @@ fn from_def_id_and_substs<'a, 'tcx>(type_map: &mut TypeMap<'tcx>, type_map.get_unique_type_id_of_type(cx, type_parameter); let param_type_id = type_map.get_unique_type_id_as_string(param_type_id); - output.push_str(param_type_id.index(&FullRange)); + output.push_str(¶m_type_id[]); output.push(','); } @@ -581,7 +581,7 @@ fn get_unique_type_id_of_closure_type<'a>(&mut self, self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(parameter_type_id.index(&FullRange)); + unique_type_id.push_str(¶meter_type_id[]); unique_type_id.push(','); } @@ -595,7 +595,7 @@ fn get_unique_type_id_of_closure_type<'a>(&mut self, ty::FnConverging(ret_ty) => { let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(return_type_id.index(&FullRange)); + unique_type_id.push_str(&return_type_id[]); } ty::FnDiverging => { unique_type_id.push_str("!"); @@ -625,8 +625,7 @@ fn get_unique_type_id_of_enum_variant<'a>(&mut self, -> UniqueTypeId { let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type); let enum_variant_type_id = format!("{}::{}", - self.get_unique_type_id_as_string(enum_type_id) - .index(&FullRange), + &self.get_unique_type_id_as_string(enum_type_id)[], variant_name); let interner_key = self.unique_id_interner.intern(Rc::new(enum_variant_type_id)); UniqueTypeId(interner_key) @@ -803,23 +802,23 @@ pub fn create_global_var_metadata(cx: &CrateContext, _ => { cx.sess() .span_bug(item.span, - format!("debuginfo::\ + &format!("debuginfo::\ create_global_var_metadata() - Captured var-id refers to \ unexpected ast_item variant: {:?}", - var_item).index(&FullRange)) + var_item)[]) } } }, - _ => cx.sess().bug(format!("debuginfo::create_global_var_metadata() \ + _ => cx.sess().bug(&format!("debuginfo::create_global_var_metadata() \ - Captured var-id refers to unexpected \ ast_map variant: {:?}", - var_item).index(&FullRange)) + var_item)[]) }; let (file_metadata, line_number) = if span != codemap::DUMMY_SP { let loc = span_start(cx, span); - (file_metadata(cx, loc.file.name.index(&FullRange)), loc.line as c_uint) + (file_metadata(cx, &loc.file.name[]), loc.line as c_uint) } else { (UNKNOWN_FILE_METADATA, UNKNOWN_LINE_NUMBER) }; @@ -830,7 +829,7 @@ pub fn create_global_var_metadata(cx: &CrateContext, let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id)); let var_name = token::get_ident(ident).get().to_string(); let linkage_name = - namespace_node.mangled_name_of_contained_item(var_name.index(&FullRange)); + namespace_node.mangled_name_of_contained_item(&var_name[]); let var_scope = namespace_node.scope; let var_name = CString::from_slice(var_name.as_bytes()); @@ -868,8 +867,8 @@ pub fn create_local_var_metadata(bcx: Block, local: &ast::Local) { Some(datum) => datum, None => { bcx.sess().span_bug(span, - format!("no entry in lllocals table for {}", - node_id).index(&FullRange)); + &format!("no entry in lllocals table for {}", + node_id)[]); } }; @@ -919,21 +918,21 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => { cx.sess() .span_bug(span, - format!( + &format!( "debuginfo::create_captured_var_metadata() - \ Captured var-id refers to unexpected \ ast_map variant: {:?}", - ast_item).index(&FullRange)); + ast_item)[]); } } } _ => { cx.sess() .span_bug(span, - format!("debuginfo::create_captured_var_metadata() - \ + 
&format!("debuginfo::create_captured_var_metadata() - \ Captured var-id refers to unexpected \ ast_map variant: {:?}", - ast_item).index(&FullRange)); + ast_item)[]); } }; @@ -963,7 +962,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let variable_access = IndirectVariable { alloca: env_pointer, - address_operations: address_operations.index(&(0..address_op_count)) + address_operations: &address_operations[0..address_op_count] }; declare_local(bcx, @@ -1039,8 +1038,8 @@ pub fn create_argument_metadata(bcx: Block, arg: &ast::Arg) { Some(v) => v, None => { bcx.sess().span_bug(span, - format!("no entry in lllocals table for {}", - node_id).index(&FullRange)); + &format!("no entry in lllocals table for {}", + node_id)[]); } }; @@ -1154,7 +1153,7 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, if let Some(code_snippet) = code_snippet { let bytes = code_snippet.as_bytes(); - if bytes.len() > 0 && bytes.index(&((bytes.len()-1)..)) == b"}" { + if bytes.len() > 0 && &bytes[(bytes.len()-1)..] == b"}" { cleanup_span = Span { lo: node_span.hi - codemap::BytePos(1), hi: node_span.hi, @@ -1298,7 +1297,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match expr.node { ast::ExprClosure(_, _, ref fn_decl, ref top_level_block) => { let name = format!("fn{}", token::gensym("fn")); - let name = token::str_to_ident(name.index(&FullRange)); + let name = token::str_to_ident(&name[]); (name, &**fn_decl, // This is not quite right. It should actually inherit // the generics of the enclosing function. @@ -1328,9 +1327,9 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } _ => { cx.sess() - .bug(format!("create_function_debug_context: \ + .bug(&format!("create_function_debug_context: \ unexpected sort of node: {:?}", - fnitem).index(&FullRange)) + fnitem)[]) } } } @@ -1339,9 +1338,9 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ast_map::NodeStructCtor(..) 
=> { return FunctionDebugContext::FunctionWithoutDebugInfo; } - _ => cx.sess().bug(format!("create_function_debug_context: \ + _ => cx.sess().bug(&format!("create_function_debug_context: \ unexpected sort of node: {:?}", - fnitem).index(&FullRange)) + fnitem)[]) }; // This can be the case for functions inlined from another crate @@ -1350,7 +1349,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); + let file_metadata = file_metadata(cx, &loc.file.name[]); let function_type_metadata = unsafe { let fn_signature = get_function_signature(cx, @@ -1377,7 +1376,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (linkage_name, containing_scope) = if has_path { let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id)); let linkage_name = namespace_node.mangled_name_of_contained_item( - function_name.index(&FullRange)); + &function_name[]); let containing_scope = namespace_node.scope; (linkage_name, containing_scope) } else { @@ -1465,7 +1464,7 @@ fn get_function_signature<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, signature.push(type_metadata(cx, arg_type, codemap::DUMMY_SP)); } - return create_DIArray(DIB(cx), signature.index(&FullRange)); + return create_DIArray(DIB(cx), &signature[]); } fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, @@ -1500,7 +1499,7 @@ fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, actual_self_type, true); - name_to_append_suffix_to.push_str(actual_self_type_name.index(&FullRange)); + name_to_append_suffix_to.push_str(&actual_self_type_name[]); if generics.is_type_parameterized() { name_to_append_suffix_to.push_str(","); @@ -1539,7 +1538,7 @@ fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let actual_type_name = compute_debuginfo_type_name(cx, actual_type, true); - name_to_append_suffix_to.push_str(actual_type_name.index(&FullRange)); + name_to_append_suffix_to.push_str(&actual_type_name[]); if index != generics.ty_params.len() - 1 { name_to_append_suffix_to.push_str(","); @@ -1566,7 +1565,7 @@ fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, name_to_append_suffix_to.push('>'); - return create_DIArray(DIB(cx), template_params.index(&FullRange)); + return create_DIArray(DIB(cx), &template_params[]); } } @@ -1660,7 +1659,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let cx: &CrateContext = bcx.ccx(); let filename = span_start(cx, span).file.name.clone(); - let file_metadata = file_metadata(cx, filename.index(&FullRange)); + let file_metadata = file_metadata(cx, &filename[]); let name = token::get_ident(variable_ident); let loc = span_start(cx, span); @@ -1746,7 +1745,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile { let work_dir = cx.sess().working_dir.as_str().unwrap(); let file_name = if full_path.starts_with(work_dir) { - full_path.index(&((work_dir.len() + 1u)..full_path.len())) + &full_path[(work_dir.len() + 1u)..full_path.len()] } else { full_path }; @@ -1777,8 +1776,8 @@ fn scope_metadata(fcx: &FunctionContext, let node = fcx.ccx.tcx().map.get(node_id); fcx.ccx.sess().span_bug(error_reporting_span, - format!("debuginfo: Could not find scope info for node {:?}", - node).index(&FullRange)); + &format!("debuginfo: Could not find scope info for node {:?}", + node)[]); } } } @@ -1971,10 +1970,10 @@ fn finalize<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> MetadataCreationResult { let type_map = 
debug_context(cx).type_map.borrow(); if type_map.find_metadata_for_unique_id(unique_type_id).is_none() || type_map.find_metadata_for_type(unfinished_type).is_none() { - cx.sess().bug(format!("Forward declaration of potentially recursive type \ + cx.sess().bug(&format!("Forward declaration of potentially recursive type \ '{}' was not found in TypeMap!", ppaux::ty_to_string(cx.tcx(), unfinished_type)) - .index(&FullRange)); + []); } } @@ -1986,7 +1985,7 @@ fn finalize<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> MetadataCreationResult { set_members_of_composite_type(cx, metadata_stub, llvm_type, - member_descriptions.index(&FullRange)); + &member_descriptions[]); return MetadataCreationResult::new(metadata_stub, true); } } @@ -2058,7 +2057,7 @@ fn prepare_struct_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let struct_metadata_stub = create_struct_stub(cx, struct_llvm_type, - struct_name.index(&FullRange), + &struct_name[], unique_type_id, containing_scope); @@ -2119,7 +2118,7 @@ fn prepare_tuple_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, unique_type_id, create_struct_stub(cx, tuple_llvm_type, - tuple_name.index(&FullRange), + &tuple_name[], unique_type_id, UNKNOWN_SCOPE_METADATA), tuple_llvm_type, @@ -2179,7 +2178,7 @@ fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>) set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - member_descriptions.index(&FullRange)); + &member_descriptions[]); MemberDescription { name: "".to_string(), llvm_type: variant_llvm_type, @@ -2212,7 +2211,7 @@ fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>) set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - member_descriptions.index(&FullRange)); + &member_descriptions[]); vec![ MemberDescription { name: "".to_string(), @@ -2312,7 +2311,7 @@ fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>) set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - variant_member_descriptions.index(&FullRange)); + &variant_member_descriptions[]); // Encode the information about the null variant in the union // member's name. @@ -2387,11 +2386,11 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, span: Span) -> (DICompositeType, Type, MemberDescriptionFactory<'tcx>) { let variant_llvm_type = - Type::struct_(cx, struct_def.fields + Type::struct_(cx, &struct_def.fields .iter() .map(|&t| type_of::type_of(cx, t)) .collect::>() - .index(&FullRange), + [], struct_def.packed); // Could do some consistency checks here: size, align, field count, discr type @@ -2458,7 +2457,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id); let loc = span_start(cx, definition_span); - let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); + let file_metadata = file_metadata(cx, &loc.file.name[]); let variants = ty::enum_variants(cx.tcx(), enum_def_id); @@ -2638,14 +2637,14 @@ fn set_members_of_composite_type(cx: &CrateContext, let min_supported_llvm_version = 3 * 1000000 + 4 * 1000; if actual_llvm_version < min_supported_llvm_version { - cx.sess().warn(format!("This version of rustc was built with LLVM \ + cx.sess().warn(&format!("This version of rustc was built with LLVM \ {}.{}. Rustc just ran into a known \ debuginfo corruption problem thatoften \ occurs with LLVM versions below 3.4. 
\ Please use a rustc built with anewer \ version of LLVM.", llvm_version_major, - llvm_version_minor).index(&FullRange)); + llvm_version_minor)[]); } else { cx.sess().bug("debuginfo::set_members_of_composite_type() - \ Already completed forward declaration re-encountered."); @@ -2683,7 +2682,7 @@ fn set_members_of_composite_type(cx: &CrateContext, .collect(); unsafe { - let type_array = create_DIArray(DIB(cx), member_metadata.index(&FullRange)); + let type_array = create_DIArray(DIB(cx), &member_metadata[]); llvm::LLVMDICompositeTypeSetTypeArray(composite_type_metadata, type_array); } } @@ -2782,7 +2781,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let member_llvm_types = slice_llvm_type.field_types(); assert!(slice_layout_is_correct(cx, - member_llvm_types.index(&FullRange), + &member_llvm_types[], element_type)); let member_descriptions = [ MemberDescription { @@ -2804,11 +2803,11 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, assert!(member_descriptions.len() == member_llvm_types.len()); let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); + let file_metadata = file_metadata(cx, &loc.file.name[]); let metadata = composite_type_metadata(cx, slice_llvm_type, - slice_type_name.index(&FullRange), + &slice_type_name[], unique_type_id, &member_descriptions, UNKNOWN_SCOPE_METADATA, @@ -2857,7 +2856,7 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, llvm::LLVMDIBuilderCreateSubroutineType( DIB(cx), UNKNOWN_FILE_METADATA, - create_DIArray(DIB(cx), signature_metadata.index(&FullRange))) + create_DIArray(DIB(cx), &signature_metadata[])) }, false); } @@ -2881,9 +2880,9 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_trait(ref data) => data.principal_def_id(), _ => { let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type); - cx.sess().bug(format!("debuginfo: Unexpected trait-object type in \ + cx.sess().bug(&format!("debuginfo: Unexpected trait-object type in \ trait_pointer_metadata(): {}", - pp_type_name.index(&FullRange)).index(&FullRange)); + &pp_type_name[])[]); } }; @@ -2897,7 +2896,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, composite_type_metadata(cx, trait_llvm_type, - trait_type_name.index(&FullRange), + &trait_type_name[], unique_type_id, &[], containing_scope, @@ -3017,13 +3016,13 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_tup(ref elements) => { prepare_tuple_metadata(cx, t, - elements.index(&FullRange), + &elements[], unique_type_id, usage_site_span).finalize(cx) } _ => { - cx.sess().bug(format!("debuginfo: unexpected type in type_metadata: {:?}", - sty).index(&FullRange)) + cx.sess().bug(&format!("debuginfo: unexpected type in type_metadata: {:?}", + sty)[]) } }; @@ -3041,9 +3040,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, type id '{}' to already be in \ the debuginfo::TypeMap but it \ was not. (Ty = {})", - unique_type_id_str.index(&FullRange), + &unique_type_id_str[], ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, error_message.index(&FullRange)); + cx.sess().span_bug(usage_site_span, &error_message[]); } }; @@ -3056,9 +3055,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, UniqueTypeId maps in \ debuginfo::TypeMap. 
\ UniqueTypeId={}, Ty={}", - unique_type_id_str.index(&FullRange), + &unique_type_id_str[], ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, error_message.index(&FullRange)); + cx.sess().span_bug(usage_site_span, &error_message[]); } } None => { @@ -3264,7 +3263,7 @@ fn with_new_scope(cx: &CrateContext, { // Create a new lexical scope and push it onto the stack let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo); - let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); + let file_metadata = file_metadata(cx, &loc.file.name[]); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -3386,7 +3385,7 @@ fn walk_pattern(cx: &CrateContext, if need_new_scope { // Create a new lexical scope and push it onto the stack let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo); - let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); + let file_metadata = file_metadata(cx, &loc.file.name[]); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -3861,8 +3860,8 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_open(_) | ty::ty_projection(..) | ty::ty_param(_) => { - cx.sess().bug(format!("debuginfo: Trying to create type name for \ - unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)); + cx.sess().bug(&format!("debuginfo: Trying to create type name for \ + unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t))[]); } } @@ -3945,13 +3944,13 @@ fn fill_nested(node: &NamespaceTreeNode, output: &mut String) { None => {} } let string = token::get_name(node.name); - output.push_str(format!("{}", string.get().len()).index(&FullRange)); + output.push_str(&format!("{}", string.get().len())[]); output.push_str(string.get()); } let mut name = String::from_str("_ZN"); fill_nested(self, &mut name); - name.push_str(format!("{}", item_name.len()).index(&FullRange)); + name.push_str(&format!("{}", item_name.len())[]); name.push_str(item_name); name.push('E'); name @@ -3959,7 +3958,7 @@ fn fill_nested(node: &NamespaceTreeNode, output: &mut String) { } fn crate_root_namespace<'a>(cx: &'a CrateContext) -> &'a str { - cx.link_meta().crate_name.index(&FullRange) + &cx.link_meta().crate_name[] } fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc { @@ -4034,9 +4033,9 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc node, None => { - cx.sess().bug(format!("debuginfo::namespace_for_item(): \ + cx.sess().bug(&format!("debuginfo::namespace_for_item(): \ path too short for {:?}", - def_id).index(&FullRange)); + def_id)[]); } } }) diff --git a/src/librustc_trans/trans/expr.rs b/src/librustc_trans/trans/expr.rs index 120e2e955e4..2d1a151c2b8 100644 --- a/src/librustc_trans/trans/expr.rs +++ b/src/librustc_trans/trans/expr.rs @@ -317,8 +317,8 @@ fn identity(t: T) -> T { t } // should just be the identity function. unsized_info(bcx, k, id, ty_substs[tp_index], identity) } - _ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}", - bcx.ty_to_string(unadjusted_ty)).index(&FullRange)) + _ => bcx.sess().bug(&format!("UnsizeStruct with bad sty: {}", + bcx.ty_to_string(unadjusted_ty))[]) }, &ty::UnsizeVtable(ty::TyTrait { ref principal, .. 
}, _) => { // Note that we preserve binding levels here: @@ -450,8 +450,8 @@ fn unsize_unique_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let datum_ty = datum.ty; let unboxed_ty = match datum_ty.sty { ty::ty_uniq(t) => t, - _ => bcx.sess().bug(format!("Expected ty_uniq, found {}", - bcx.ty_to_string(datum_ty)).index(&FullRange)) + _ => bcx.sess().bug(&format!("Expected ty_uniq, found {}", + bcx.ty_to_string(datum_ty))[]) }; let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span)); @@ -622,9 +622,9 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => { bcx.tcx().sess.span_bug( expr.span, - format!("trans_rvalue_datum_unadjusted reached \ + &format!("trans_rvalue_datum_unadjusted reached \ fall-through case: {:?}", - expr.node).index(&FullRange)); + expr.node)[]); } } } @@ -975,9 +975,9 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => { bcx.tcx().sess.span_bug( expr.span, - format!("trans_rvalue_stmt_unadjusted reached \ + &format!("trans_rvalue_stmt_unadjusted reached \ fall-through case: {:?}", - expr.node).index(&FullRange)); + expr.node)[]); } } } @@ -1003,14 +1003,14 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest) } ast::ExprMatch(ref discr, ref arms, _) => { - _match::trans_match(bcx, expr, &**discr, arms.index(&FullRange), dest) + _match::trans_match(bcx, expr, &**discr, &arms[], dest) } ast::ExprBlock(ref blk) => { controlflow::trans_block(bcx, &**blk, dest) } ast::ExprStruct(_, ref fields, ref base) => { trans_struct(bcx, - fields.index(&FullRange), + &fields[], base.as_ref().map(|e| &**e), expr.span, expr.id, @@ -1075,7 +1075,7 @@ fn make_field(field_name: &str, expr: P) -> ast::Field { trans_adt(bcx, expr_ty(bcx, expr), 0, - numbered_fields.index(&FullRange), + &numbered_fields[], None, dest, Some(NodeInfo { id: expr.id, span: expr.span })) @@ -1119,13 +1119,13 @@ fn make_field(field_name: &str, expr: P) -> ast::Field { trans_overloaded_call(bcx, expr, &**f, - args.index(&FullRange), + &args[], Some(dest)) } else { callee::trans_call(bcx, expr, &**f, - callee::ArgExprs(args.index(&FullRange)), + callee::ArgExprs(&args[]), dest) } } @@ -1133,7 +1133,7 @@ fn make_field(field_name: &str, expr: P) -> ast::Field { callee::trans_method_call(bcx, expr, &*args[0], - callee::ArgExprs(args.index(&FullRange)), + callee::ArgExprs(&args[]), dest) } ast::ExprBinary(op, ref lhs, ref rhs) => { @@ -1180,9 +1180,9 @@ fn make_field(field_name: &str, expr: P) -> ast::Field { _ => { bcx.tcx().sess.span_bug( expr.span, - format!("trans_rvalue_dps_unadjusted reached fall-through \ + &format!("trans_rvalue_dps_unadjusted reached fall-through \ case: {:?}", - expr.node).index(&FullRange)); + expr.node)[]); } } } @@ -1230,9 +1230,9 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx } _ => { - bcx.tcx().sess.span_bug(ref_expr.span, format!( + bcx.tcx().sess.span_bug(ref_expr.span, &format!( "Non-DPS def {:?} referened by {}", - def, bcx.node_id_to_string(ref_expr.id)).index(&FullRange)); + def, bcx.node_id_to_string(ref_expr.id))[]); } } } @@ -1258,10 +1258,10 @@ pub fn trans_def_fn_unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, param_substs) } _ => { - ccx.tcx().sess.span_bug(ref_expr.span, format!( + ccx.tcx().sess.span_bug(ref_expr.span, &format!( "trans_def_fn_unadjusted invoked on: {:?} for {}", def, - ref_expr.repr(ccx.tcx())).index(&FullRange)); + ref_expr.repr(ccx.tcx()))[]); } } } @@ -1279,9 +1279,9 @@ pub 
fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, match bcx.fcx.llupvars.borrow().get(&nid) { Some(&val) => Datum::new(val, local_ty, Lvalue), None => { - bcx.sess().bug(format!( + bcx.sess().bug(&format!( "trans_local_var: no llval for upvar {} found", - nid).index(&FullRange)); + nid)[]); } } } @@ -1289,9 +1289,9 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let datum = match bcx.fcx.lllocals.borrow().get(&nid) { Some(&v) => v, None => { - bcx.sess().bug(format!( + bcx.sess().bug(&format!( "trans_local_var: no datum for local/arg {} found", - nid).index(&FullRange)); + nid)[]); } }; debug!("take_local(nid={}, v={}, ty={})", @@ -1299,9 +1299,9 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, datum } _ => { - bcx.sess().unimpl(format!( + bcx.sess().unimpl(&format!( "unsupported def type in trans_local_var: {:?}", - def).index(&FullRange)); + def)[]); } } } @@ -1318,21 +1318,21 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, { match ty.sty { ty::ty_struct(did, substs) => { - op(0, struct_fields(tcx, did, substs).index(&FullRange)) + op(0, &struct_fields(tcx, did, substs)[]) } ty::ty_tup(ref v) => { - op(0, tup_fields(v.index(&FullRange)).index(&FullRange)) + op(0, &tup_fields(&v[])[]) } ty::ty_enum(_, substs) => { // We want the *variant* ID here, not the enum ID. match node_id_opt { None => { - tcx.sess.bug(format!( + tcx.sess.bug(&format!( "cannot get field types from the enum type {} \ without a node ID", - ty.repr(tcx)).index(&FullRange)); + ty.repr(tcx))[]); } Some(node_id) => { let def = tcx.def_map.borrow()[node_id].clone(); @@ -1341,9 +1341,9 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, let variant_info = ty::enum_variant_with_id( tcx, enum_id, variant_id); op(variant_info.disr_val, - struct_fields(tcx, + &struct_fields(tcx, variant_id, - substs).index(&FullRange)) + substs)[]) } _ => { tcx.sess.bug("resolve didn't map this expr to a \ @@ -1355,9 +1355,9 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, } _ => { - tcx.sess.bug(format!( + tcx.sess.bug(&format!( "cannot get field types from the type {}", - ty.repr(tcx)).index(&FullRange)); + ty.repr(tcx))[]); } } } @@ -2045,21 +2045,21 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, lldiscrim_a, true), cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out), _ => { - ccx.sess().bug(format!("translating unsupported cast: \ + ccx.sess().bug(&format!("translating unsupported cast: \ {} ({:?}) -> {} ({:?})", t_in.repr(bcx.tcx()), k_in, t_out.repr(bcx.tcx()), - k_out).index(&FullRange)) + k_out)[]) } } } - _ => ccx.sess().bug(format!("translating unsupported cast: \ + _ => ccx.sess().bug(&format!("translating unsupported cast: \ {} ({:?}) -> {} ({:?})", t_in.repr(bcx.tcx()), k_in, t_out.repr(bcx.tcx()), - k_out).index(&FullRange)) + k_out)[]) }; return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock(); } @@ -2224,8 +2224,8 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => { bcx.tcx().sess.span_bug( expr.span, - format!("deref invoked on expr of illegal type {}", - datum.ty.repr(bcx.tcx())).index(&FullRange)); + &format!("deref invoked on expr of illegal type {}", + datum.ty.repr(bcx.tcx()))[]); } }; diff --git a/src/librustc_trans/trans/foreign.rs b/src/librustc_trans/trans/foreign.rs index 25eb66ab2eb..3dfb36c854b 100644 --- a/src/librustc_trans/trans/foreign.rs +++ b/src/librustc_trans/trans/foreign.rs @@ -109,7 +109,7 @@ pub fn register_static(ccx: &CrateContext, let llty = type_of::type_of(ccx, ty); let ident = link_name(foreign_item); - match 
attr::first_attr_value_str_by_name(foreign_item.attrs.index(&FullRange), + match attr::first_attr_value_str_by_name(&foreign_item.attrs[], "linkage") { // If this is a static with a linkage specified, then we need to handle // it a little specially. The typesystem prevents things like &T and @@ -235,13 +235,13 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => ccx.sess().bug("trans_native_call called on non-function type") }; let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig); - let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys.index(&FullRange)); + let llsig = foreign_signature(ccx, &fn_sig, &passed_arg_tys[]); let fn_type = cabi::compute_abi_info(ccx, - llsig.llarg_tys.index(&FullRange), + &llsig.llarg_tys[], llsig.llret_ty, llsig.ret_def); - let arg_tys: &[cabi::ArgType] = fn_type.arg_tys.index(&FullRange); + let arg_tys: &[cabi::ArgType] = &fn_type.arg_tys[]; let mut llargs_foreign = Vec::new(); @@ -367,7 +367,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llforeign_retval = CallWithConv(bcx, llfn, - llargs_foreign.index(&FullRange), + &llargs_foreign[], cc, Some(attrs)); @@ -437,7 +437,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) { abi => { let ty = ty::node_id_to_type(ccx.tcx(), foreign_item.id); register_foreign_item_fn(ccx, abi, ty, - lname.get().index(&FullRange)); + &lname.get()[]); // Unlike for other items, we shouldn't call // `base::update_linkage` here. Foreign items have // special linkage requirements, which are handled @@ -566,10 +566,10 @@ fn build_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, assert!(f.abi != Rust && f.abi != RustIntrinsic); } _ => { - ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \ + ccx.sess().bug(&format!("build_rust_fn: extern fn {} has ty {}, \ expected a bare fn ty", ccx.tcx().map.path_to_string(id), - t.repr(tcx)).index(&FullRange)); + t.repr(tcx))[]); } }; @@ -577,7 +577,7 @@ fn build_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ccx.tcx().map.path_to_string(id), id, t.repr(tcx)); - let llfn = base::decl_internal_rust_fn(ccx, t, ps.index(&FullRange)); + let llfn = base::decl_internal_rust_fn(ccx, t, &ps[]); base::set_llvm_fn_attrs(ccx, attrs, llfn); base::trans_fn(ccx, decl, body, llfn, param_substs, id, &[]); llfn @@ -817,9 +817,9 @@ unsafe fn build_wrap_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // the massive simplifications that have occurred. 
pub fn link_name(i: &ast::ForeignItem) -> InternedString { - match attr::first_attr_value_str_by_name(i.attrs.index(&FullRange), "link_name") { + match attr::first_attr_value_str_by_name(&i.attrs[], "link_name") { Some(ln) => ln.clone(), - None => match weak_lang_items::link_name(i.attrs.index(&FullRange)) { + None => match weak_lang_items::link_name(&i.attrs[]) { Some(name) => name, None => token::get_ident(i.ident), } @@ -862,7 +862,7 @@ fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig); let llsig = foreign_signature(ccx, &fn_sig, fn_sig.inputs.as_slice()); let fn_ty = cabi::compute_abi_info(ccx, - llsig.llarg_tys.index(&FullRange), + &llsig.llarg_tys[], llsig.llret_ty, llsig.ret_def); debug!("foreign_types_for_fn_ty(\ @@ -871,7 +871,7 @@ fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty={} -> {}, \ ret_def={}", ty.repr(ccx.tcx()), - ccx.tn().types_to_str(llsig.llarg_tys.index(&FullRange)), + ccx.tn().types_to_str(&llsig.llarg_tys[]), ccx.tn().type_to_string(llsig.llret_ty), ccx.tn().types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::>().as_slice()), ccx.tn().type_to_string(fn_ty.ret_ty.ty), @@ -923,7 +923,7 @@ fn lltype_for_fn_from_foreign_types(ccx: &CrateContext, tys: &ForeignTypes) -> T if tys.fn_sig.variadic { Type::variadic_func(llargument_tys.as_slice(), &llreturn_ty) } else { - Type::func(llargument_tys.index(&FullRange), &llreturn_ty) + Type::func(&llargument_tys[], &llreturn_ty) } } diff --git a/src/librustc_trans/trans/glue.rs b/src/librustc_trans/trans/glue.rs index 52e7a986d7e..2219cd59263 100644 --- a/src/librustc_trans/trans/glue.rs +++ b/src/librustc_trans/trans/glue.rs @@ -161,7 +161,7 @@ pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Val let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) { Some(old_sym) => { - let glue = decl_cdecl_fn(ccx, old_sym.index(&FullRange), llfnty, ty::mk_nil(ccx.tcx())); + let glue = decl_cdecl_fn(ccx, &old_sym[], llfnty, ty::mk_nil(ccx.tcx())); (glue, None) }, None => { @@ -233,8 +233,8 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, assert!(sig.inputs.len() == 1); sig.inputs[0] } - _ => bcx.sess().bug(format!("Expected function type, found {}", - bcx.ty_to_string(fty)).index(&FullRange)) + _ => bcx.sess().bug(&format!("Expected function type, found {}", + bcx.ty_to_string(fty))[]) }; let (struct_data, info) = if type_is_sized(bcx.tcx(), t) { @@ -295,7 +295,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, class_did, &[get_drop_glue_type(bcx.ccx(), t)], ty::mk_nil(bcx.tcx())); - let (_, variant_cx) = invoke(variant_cx, dtor_addr, args.index(&FullRange), dtor_ty, None); + let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[], dtor_ty, None); variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope); variant_cx @@ -353,8 +353,8 @@ fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty); (Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), 8u)) } - _ => bcx.sess().bug(format!("Unexpected unsized type, found {}", - bcx.ty_to_string(t)).index(&FullRange)) + _ => bcx.sess().bug(&format!("Unexpected unsized type, found {}", + bcx.ty_to_string(t))[]) } } @@ -423,10 +423,10 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>) } else { // Give the user a heads up that we are doing something // stupid and dangerous. 
- bcx.sess().warn(format!("Ignoring drop flag in destructor for {}\ + bcx.sess().warn(&format!("Ignoring drop flag in destructor for {}\ because the struct is unsized. See issue\ #16758", - bcx.ty_to_string(t)).index(&FullRange)); + bcx.ty_to_string(t))[]); trans_struct_drop(bcx, t, v0, dtor, did, substs) } } @@ -496,7 +496,7 @@ pub fn declare_tydesc<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) note_unique_llvm_symbol(ccx, name); let ty_name = token::intern_and_get_ident( - ppaux::ty_to_string(ccx.tcx(), t).index(&FullRange)); + &ppaux::ty_to_string(ccx.tcx(), t)[]); let ty_name = C_str_slice(ccx, ty_name); debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t)); @@ -515,8 +515,8 @@ fn declare_generic_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, let fn_nm = mangle_internal_name_by_type_and_seq( ccx, t, - format!("glue_{}", name).index(&FullRange)); - let llfn = decl_cdecl_fn(ccx, fn_nm.index(&FullRange), llfnty, ty::mk_nil(ccx.tcx())); + &format!("glue_{}", name)[]); + let llfn = decl_cdecl_fn(ccx, &fn_nm[], llfnty, ty::mk_nil(ccx.tcx())); note_unique_llvm_symbol(ccx, fn_nm.clone()); return (fn_nm, llfn); } diff --git a/src/librustc_trans/trans/meth.rs b/src/librustc_trans/trans/meth.rs index 28718ffa980..7ac062108f3 100644 --- a/src/librustc_trans/trans/meth.rs +++ b/src/librustc_trans/trans/meth.rs @@ -77,7 +77,7 @@ pub fn trans_impl(ccx: &CrateContext, match *impl_item { ast::MethodImplItem(ref method) => { if method.pe_generics().ty_params.len() == 0u { - let trans_everywhere = attr::requests_inline(method.attrs.index(&FullRange)); + let trans_everywhere = attr::requests_inline(&method.attrs[]); for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) { let llfn = get_item_val(ccx, method.id); trans_fn(ccx, @@ -229,7 +229,7 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // Here, in this call, which I've written with explicit UFCS // notation, the set of type parameters will be: // - // rcvr_type: .index(&FullRange) <-- nothing declared on the trait itself + // rcvr_type: [] <-- nothing declared on the trait itself // rcvr_self: [Vec] <-- the self type // rcvr_method: [String] <-- method type parameter // @@ -268,11 +268,11 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // // Recall that we matched ` as Convert>`. Trait // resolution will have given us a substitution - // containing `impl_substs=[[T=int],.index(&FullRange),.index(&FullRange)]` (the type + // containing `impl_substs=[[T=int],[],[]]` (the type // parameters defined on the impl). We combine // that with the `rcvr_method` from before, which tells us // the type parameters from the *method*, to yield - // `callee_substs=[[T=int],.index(&FullRange),[U=String]]`. + // `callee_substs=[[T=int],[],[U=String]]`. let subst::SeparateVecsPerParamSpace { types: impl_type, selfs: impl_self, @@ -289,8 +289,8 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, callee_substs) } _ => { - tcx.sess.bug(format!("static call to invalid vtable: {}", - vtbl.repr(tcx)).index(&FullRange)); + tcx.sess.bug(&format!("static call to invalid vtable: {}", + vtbl.repr(tcx))[]); } } } @@ -377,8 +377,8 @@ fn trans_monomorphized_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, traits::VtableBuiltin(..) | traits::VtableParam(..) 
=> { bcx.sess().bug( - format!("resolved vtable bad vtable {} in trans", - vtable.repr(bcx.tcx())).index(&FullRange)); + &format!("resolved vtable bad vtable {} in trans", + vtable.repr(bcx.tcx()))[]); } } } @@ -738,9 +738,9 @@ pub fn get_vtable<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } traits::VtableParam => { bcx.sess().bug( - format!("resolved vtable for {} to bad vtable {} in trans", + &format!("resolved vtable for {} to bad vtable {} in trans", trait_ref.repr(bcx.tcx()), - vtable.repr(bcx.tcx())).index(&FullRange)); + vtable.repr(bcx.tcx()))[]); } } }); @@ -772,7 +772,7 @@ pub fn make_vtable>(ccx: &CrateContext, let components: Vec<_> = head.into_iter().chain(ptrs).collect(); unsafe { - let tbl = C_struct(ccx, components.index(&FullRange), false); + let tbl = C_struct(ccx, &components[], false); let sym = token::gensym("vtable"); let buf = CString::from_vec(format!("vtable{}", sym.uint()).into_bytes()); let vt_gvar = llvm::LLVMAddGlobal(ccx.llmod(), val_ty(tbl).to_ref(), diff --git a/src/librustc_trans/trans/monomorphize.rs b/src/librustc_trans/trans/monomorphize.rs index e2594765f4f..dd8cb90f1a6 100644 --- a/src/librustc_trans/trans/monomorphize.rs +++ b/src/librustc_trans/trans/monomorphize.rs @@ -131,7 +131,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, hash = format!("h{}", state.result()); ccx.tcx().map.with_path(fn_id.node, |path| { - exported_name(path, hash.index(&FullRange)) + exported_name(path, &hash[]) }) }; @@ -141,9 +141,9 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let mut hash_id = Some(hash_id); let mut mk_lldecl = |&mut : abi: abi::Abi| { let lldecl = if abi != abi::Rust { - foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s.index(&FullRange)) + foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, &s[]) } else { - decl_internal_rust_fn(ccx, mono_ty, s.index(&FullRange)) + decl_internal_rust_fn(ccx, mono_ty, &s[]) }; ccx.monomorphized().borrow_mut().insert(hash_id.take().unwrap(), lldecl); @@ -177,12 +177,12 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, .. 
} => { let d = mk_lldecl(abi); - let needs_body = setup_lldecl(d, i.attrs.index(&FullRange)); + let needs_body = setup_lldecl(d, &i.attrs[]); if needs_body { if abi != abi::Rust { foreign::trans_rust_fn_with_foreign_abi( ccx, &**decl, &**body, &[], d, psubsts, fn_id.node, - Some(hash.index(&FullRange))); + Some(&hash[])); } else { trans_fn(ccx, &**decl, &**body, d, psubsts, fn_id.node, &[]); } @@ -206,7 +206,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, trans_enum_variant(ccx, parent, &*v, - args.index(&FullRange), + &args[], this_tv.disr_val, psubsts, d); @@ -220,7 +220,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, match *ii { ast::MethodImplItem(ref mth) => { let d = mk_lldecl(abi::Rust); - let needs_body = setup_lldecl(d, mth.attrs.index(&FullRange)); + let needs_body = setup_lldecl(d, &mth.attrs[]); if needs_body { trans_fn(ccx, mth.pe_fn_decl(), @@ -241,7 +241,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, match *method { ast::ProvidedMethod(ref mth) => { let d = mk_lldecl(abi::Rust); - let needs_body = setup_lldecl(d, mth.attrs.index(&FullRange)); + let needs_body = setup_lldecl(d, &mth.attrs[]); if needs_body { trans_fn(ccx, mth.pe_fn_decl(), mth.pe_body(), d, psubsts, mth.id, &[]); @@ -249,8 +249,8 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, d } _ => { - ccx.sess().bug(format!("can't monomorphize a {:?}", - map_node).index(&FullRange)) + ccx.sess().bug(&format!("can't monomorphize a {:?}", + map_node)[]) } } } @@ -258,7 +258,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let d = mk_lldecl(abi::Rust); set_inline_hint(d); base::trans_tuple_struct(ccx, - struct_def.fields.index(&FullRange), + &struct_def.fields[], struct_def.ctor_id.expect("ast-mapped tuple struct \ didn't have a ctor id"), psubsts, @@ -275,8 +275,8 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ast_map::NodeBlock(..) | ast_map::NodePat(..) | ast_map::NodeLocal(..) 
=> { - ccx.sess().bug(format!("can't monomorphize a {:?}", - map_node).index(&FullRange)) + ccx.sess().bug(&format!("can't monomorphize a {:?}", + map_node)[]) } }; diff --git a/src/librustc_trans/trans/type_.rs b/src/librustc_trans/trans/type_.rs index 66e27ed1188..8de1108fef8 100644 --- a/src/librustc_trans/trans/type_.rs +++ b/src/librustc_trans/trans/type_.rs @@ -103,7 +103,7 @@ pub fn i8p(ccx: &CrateContext) -> Type { } pub fn int(ccx: &CrateContext) -> Type { - match ccx.tcx().sess.target.target.target_word_size.index(&FullRange) { + match &ccx.tcx().sess.target.target.target_word_size[] { "32" => Type::i32(ccx), "64" => Type::i64(ccx), tws => panic!("Unsupported target word size for int: {}", tws), diff --git a/src/librustc_trans/trans/type_of.rs b/src/librustc_trans/trans/type_of.rs index 19d50cdd483..c908441e366 100644 --- a/src/librustc_trans/trans/type_of.rs +++ b/src/librustc_trans/trans/type_of.rs @@ -140,7 +140,7 @@ pub fn type_of_rust_fn<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let input_tys = inputs.iter().map(|&arg_ty| type_of_explicit_arg(cx, arg_ty)); atys.extend(input_tys); - Type::func(atys.index(&FullRange), &lloutputtype) + Type::func(&atys[], &lloutputtype) } // Given a function type and a count of ty params, construct an llvm type @@ -180,8 +180,8 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ let llsizingty = match t.sty { _ if !lltype_is_sized(cx.tcx(), t) => { - cx.sess().bug(format!("trying to take the sizing type of {}, an unsized type", - ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)) + cx.sess().bug(&format!("trying to take the sizing type of {}, an unsized type", + ppaux::ty_to_string(cx.tcx(), t))[]) } ty::ty_bool => Type::bool(cx), @@ -233,8 +233,8 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ } ty::ty_projection(..) | ty::ty_infer(..) | ty::ty_param(..) | ty::ty_err(..) => { - cx.sess().bug(format!("fictitious type {} in sizing_type_of()", - ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)) + cx.sess().bug(&format!("fictitious type {} in sizing_type_of()", + ppaux::ty_to_string(cx.tcx(), t))[]) } ty::ty_vec(_, None) | ty::ty_trait(..) | ty::ty_str => panic!("unreachable") }; @@ -313,7 +313,7 @@ fn type_of_unsize_info<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Ty let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, an_enum, did, tps); - adt::incomplete_type_of(cx, &*repr, name.index(&FullRange)) + adt::incomplete_type_of(cx, &*repr, &name[]) } ty::ty_unboxed_closure(did, _, ref substs) => { // Only create the named struct, but don't fill it in. 
We @@ -324,7 +324,7 @@ fn type_of_unsize_info<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Ty // contents of the VecPerParamSpace to to construct the llvm // name let name = llvm_type_name(cx, an_unboxed_closure, did, substs.types.as_slice()); - adt::incomplete_type_of(cx, &*repr, name.index(&FullRange)) + adt::incomplete_type_of(cx, &*repr, &name[]) } ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) | ty::ty_ptr(ty::mt{ty, ..}) => { @@ -380,7 +380,7 @@ fn type_of_unsize_info<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Ty let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, a_struct, did, tps); - adt::incomplete_type_of(cx, &*repr, name.index(&FullRange)) + adt::incomplete_type_of(cx, &*repr, &name[]) } } @@ -398,8 +398,8 @@ fn type_of_unsize_info<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Ty Type::struct_(cx, &[p_ty, type_of_unsize_info(cx, t)], false) } ty::ty_trait(..) => Type::opaque_trait(cx), - _ => cx.sess().bug(format!("ty_open with sized type: {}", - ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)) + _ => cx.sess().bug(&format!("ty_open with sized type: {}", + ppaux::ty_to_string(cx.tcx(), t))[]) }, ty::ty_infer(..) => cx.sess().bug("type_of with ty_infer"), diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 265ebe00d53..867d6f322d1 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -193,11 +193,11 @@ pub fn opt_ast_region_to_region<'tcx>( format!("`{}`", name) }; - m.push_str(if n == 1 { + m.push_str(&(if n == 1 { help_name } else { format!("one of {}'s {} elided lifetimes", help_name, n) - }.index(&FullRange)); + })[]); if len == 2 && i == 0 { m.push_str(" or "); @@ -344,10 +344,10 @@ fn create_substs_for_ast_path<'tcx>( "expected" }; this.tcx().sess.span_fatal(span, - format!("wrong number of type arguments: {} {}, found {}", + &format!("wrong number of type arguments: {} {}, found {}", expected, required_ty_param_count, - supplied_ty_param_count).index(&FullRange)); + supplied_ty_param_count)[]); } else if supplied_ty_param_count > formal_ty_param_count { let expected = if required_ty_param_count < formal_ty_param_count { "expected at most" @@ -355,10 +355,10 @@ fn create_substs_for_ast_path<'tcx>( "expected" }; this.tcx().sess.span_fatal(span, - format!("wrong number of type arguments: {} {}, found {}", + &format!("wrong number of type arguments: {} {}, found {}", expected, formal_ty_param_count, - supplied_ty_param_count).index(&FullRange)); + supplied_ty_param_count)[]); } let mut substs = Substs::new_type(types, regions); @@ -377,7 +377,7 @@ fn create_substs_for_ast_path<'tcx>( } } - for param in ty_param_defs.index(&(supplied_ty_param_count..)).iter() { + for param in ty_param_defs[supplied_ty_param_count..].iter() { match param.default { Some(default) => { // This is a default type parameter. 
@@ -556,8 +556,8 @@ pub fn instantiate_trait_ref<'tcx>( _ => { this.tcx().sess.span_fatal( ast_trait_ref.path.span, - format!("`{}` is not a trait", - ast_trait_ref.path.user_string(this.tcx())).index(&FullRange)); + &format!("`{}` is not a trait", + ast_trait_ref.path.user_string(this.tcx()))[]); } } } @@ -825,8 +825,8 @@ pub fn ast_ty_to_builtin_ty<'tcx>( this.tcx() .sess .span_bug(ast_ty.span, - format!("unbound path {}", - path.repr(this.tcx())).index(&FullRange)) + &format!("unbound path {}", + path.repr(this.tcx()))[]) } Some(&d) => d }; @@ -847,8 +847,8 @@ pub fn ast_ty_to_builtin_ty<'tcx>( _ => { this.tcx().sess.span_bug( path.span, - format!("converting `Box` to `{}`", - ty.repr(this.tcx())).index(&FullRange)); + &format!("converting `Box` to `{}`", + ty.repr(this.tcx()))[]); } } } @@ -1068,14 +1068,14 @@ pub fn ast_ty_to_ty<'tcx>( ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None) } ast::TyObjectSum(ref ty, ref bounds) => { - match ast_ty_to_trait_ref(this, rscope, &**ty, bounds.index(&FullRange)) { + match ast_ty_to_trait_ref(this, rscope, &**ty, &bounds[]) { Ok((trait_ref, projection_bounds)) => { trait_ref_to_object_type(this, rscope, ast_ty.span, trait_ref, projection_bounds, - bounds.index(&FullRange)) + &bounds[]) } Err(ErrorReported) => { this.tcx().types.err @@ -1110,15 +1110,15 @@ pub fn ast_ty_to_ty<'tcx>( ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(bare_fn)) } ast::TyPolyTraitRef(ref bounds) => { - conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds.index(&FullRange)) + conv_ty_poly_trait_ref(this, rscope, ast_ty.span, &bounds[]) } ast::TyPath(ref path, id) => { let a_def = match tcx.def_map.borrow().get(&id) { None => { tcx.sess .span_bug(ast_ty.span, - format!("unbound path {}", - path.repr(tcx)).index(&FullRange)) + &format!("unbound path {}", + path.repr(tcx))[]) } Some(&d) => d }; @@ -1156,8 +1156,8 @@ pub fn ast_ty_to_ty<'tcx>( } def::DefMod(id) => { tcx.sess.span_fatal(ast_ty.span, - format!("found module name used as a type: {}", - tcx.map.node_to_string(id.node)).index(&FullRange)); + &format!("found module name used as a type: {}", + tcx.map.node_to_string(id.node))[]); } def::DefPrimTy(_) => { panic!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call"); @@ -1166,7 +1166,7 @@ pub fn ast_ty_to_ty<'tcx>( let path_str = tcx.map.path_to_string( tcx.map.get_parent(trait_type_id.node)); tcx.sess.span_err(ast_ty.span, - format!("ambiguous associated \ + &format!("ambiguous associated \ type; specify the type \ using the syntax `::{}`", @@ -1176,7 +1176,7 @@ pub fn ast_ty_to_ty<'tcx>( .last() .unwrap() .identifier) - .get()).index(&FullRange)); + .get())[]); this.tcx().types.err } def::DefAssociatedPath(provenance, assoc_ident) => { @@ -1184,9 +1184,9 @@ pub fn ast_ty_to_ty<'tcx>( } _ => { tcx.sess.span_fatal(ast_ty.span, - format!("found value name used \ + &format!("found value name used \ as a type: {:?}", - a_def).index(&FullRange)); + a_def)[]); } } } @@ -1212,9 +1212,9 @@ pub fn ast_ty_to_ty<'tcx>( Err(ref r) => { tcx.sess.span_fatal( ast_ty.span, - format!("expected constant expr for array \ + &format!("expected constant expr for array \ length: {}", - *r).index(&FullRange)); + *r)[]); } } } @@ -1336,7 +1336,7 @@ fn ty_of_method_or_bare_fn<'a, 'tcx>(this: &AstConv<'tcx>, let input_params = if self_ty.is_some() { decl.inputs.slice_from(1) } else { - decl.inputs.index(&FullRange) + &decl.inputs[] }; let input_tys = input_params.iter().map(|a| ty_of_arg(this, &rb, a, None)); let input_pats: Vec = input_params.iter() @@ -1551,7 +1551,7 @@ fn 
conv_ty_poly_trait_ref<'tcx>( ast_bounds: &[ast::TyParamBound]) -> Ty<'tcx> { - let mut partitioned_bounds = partition_bounds(this.tcx(), span, ast_bounds.index(&FullRange)); + let mut partitioned_bounds = partition_bounds(this.tcx(), span, &ast_bounds[]); let mut projection_bounds = Vec::new(); let main_trait_bound = if !partitioned_bounds.trait_bounds.is_empty() { @@ -1600,8 +1600,8 @@ pub fn conv_existential_bounds_from_partitioned_bounds<'tcx>( let b = &trait_bounds[0]; this.tcx().sess.span_err( b.trait_ref.path.span, - format!("only the builtin traits can be used \ - as closure or object bounds").index(&FullRange)); + &format!("only the builtin traits can be used \ + as closure or object bounds")[]); } let region_bound = compute_region_bound(this, @@ -1673,8 +1673,8 @@ fn compute_opt_region_bound<'tcx>(tcx: &ty::ctxt<'tcx>, if derived_region_bounds.slice_from(1).iter().any(|r1| r != *r1) { tcx.sess.span_err( span, - format!("ambiguous lifetime bound, \ - explicit lifetime bound required").index(&FullRange)); + &format!("ambiguous lifetime bound, \ + explicit lifetime bound required")[]); } return Some(r); } @@ -1700,7 +1700,7 @@ fn compute_region_bound<'tcx>( None => { this.tcx().sess.span_err( span, - format!("explicit lifetime bound required").index(&FullRange)); + &format!("explicit lifetime bound required")[]); ty::ReStatic } } diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index cd27c20db45..7e72f300f41 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -314,8 +314,8 @@ fn extract_trait_ref(&mut self, self_ty: Ty<'tcx>, mut closure: F) -> R wh None => { self.tcx().sess.span_bug( self.span, - format!("self-type `{}` for ObjectPick never dereferenced to an object", - self_ty.repr(self.tcx())).index(&FullRange)) + &format!("self-type `{}` for ObjectPick never dereferenced to an object", + self_ty.repr(self.tcx()))[]) } } } @@ -367,10 +367,10 @@ fn unify_receivers(&mut self, Err(_) => { self.tcx().sess.span_bug( self.span, - format!( + &format!( "{} was a subtype of {} but now is not?", self_ty.repr(self.tcx()), - method_self_ty.repr(self.tcx())).index(&FullRange)); + method_self_ty.repr(self.tcx()))[]); } } } diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index 87ea082b6b2..bb000742def 100644 --- a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -264,9 +264,9 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, _ => { fcx.tcx().sess.span_bug( span, - format!( + &format!( "trait method is &self but first arg is: {}", - transformed_self_ty.repr(fcx.tcx())).index(&FullRange)); + transformed_self_ty.repr(fcx.tcx()))[]); } } } @@ -274,9 +274,9 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, _ => { fcx.tcx().sess.span_bug( span, - format!( + &format!( "unexpected explicit self type in operator method: {:?}", - method_ty.explicit_self).index(&FullRange)); + method_ty.explicit_self)[]); } } } @@ -329,8 +329,8 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // If the method has the name of a field, give a help note if is_field { cx.sess.span_note(span, - format!("use `(s.{0})(...)` if you meant to call the \ - function stored in the `{0}` field", method_ustring).index(&FullRange)); + &format!("use `(s.{0})(...)` if you meant to call the \ + function stored in the `{0}` field", method_ustring)[]); } if static_sources.len() > 0 { diff --git 
a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index 4ba161fa835..36321e5e8c6 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -575,8 +575,8 @@ fn assemble_unboxed_closure_candidates(&mut self, None => { self.tcx().sess.span_bug( self.span, - format!("No entry for unboxed closure: {}", - closure_def_id.repr(self.tcx())).index(&FullRange)); + &format!("No entry for unboxed closure: {}", + closure_def_id.repr(self.tcx()))[]); } }; @@ -745,7 +745,7 @@ fn pick_method(&mut self, self_ty: Ty<'tcx>) -> Option> { debug!("pick_method(self_ty={})", self.infcx().ty_to_string(self_ty)); debug!("searching inherent candidates"); - match self.consider_candidates(self_ty, self.inherent_candidates.index(&FullRange)) { + match self.consider_candidates(self_ty, &self.inherent_candidates[]) { None => {} Some(pick) => { return Some(pick); @@ -753,7 +753,7 @@ fn pick_method(&mut self, self_ty: Ty<'tcx>) -> Option> { } debug!("searching extension candidates"); - self.consider_candidates(self_ty, self.extension_candidates.index(&FullRange)) + self.consider_candidates(self_ty, &self.extension_candidates[]) } fn consider_candidates(&self, @@ -768,7 +768,7 @@ fn consider_candidates(&self, debug!("applicable_candidates: {}", applicable_candidates.repr(self.tcx())); if applicable_candidates.len() > 1 { - match self.collapse_candidates_to_trait_pick(applicable_candidates.index(&FullRange)) { + match self.collapse_candidates_to_trait_pick(&applicable_candidates[]) { Some(pick) => { return Some(Ok(pick)); } None => { } } @@ -864,7 +864,7 @@ fn collapse_candidates_to_trait_pick(&self, Some(data) => data, None => return None, }; - if probes.index(&(1..)).iter().any(|p| p.to_trait_data() != Some(trait_data)) { + if probes[1..].iter().any(|p| p.to_trait_data() != Some(trait_data)) { return None; } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 9563dd45ca2..c2bad39b78b 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -593,7 +593,7 @@ fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>, let tcx = ccx.tcx; let err_count_on_creation = tcx.sess.err_count(); - let arg_tys = fn_sig.inputs.index(&FullRange); + let arg_tys = &fn_sig.inputs[]; let ret_ty = fn_sig.output; debug!("check_fn(arg_tys={}, ret_ty={}, fn_id={})", @@ -691,7 +691,7 @@ pub fn check_item(ccx: &CrateCtxt, it: &ast::Item) { ast::ItemEnum(ref enum_definition, _) => { check_enum_variants(ccx, it.span, - enum_definition.variants.index(&FullRange), + &enum_definition.variants[], it.id); } ast::ItemFn(ref decl, _, _, _, ref body) => { @@ -985,21 +985,21 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, (&ty::StaticExplicitSelfCategory, _) => { tcx.sess.span_err( impl_m_span, - format!("method `{}` has a `{}` declaration in the impl, \ + &format!("method `{}` has a `{}` declaration in the impl, \ but not in the trait", token::get_name(trait_m.name), ppaux::explicit_self_category_to_str( - &impl_m.explicit_self)).index(&FullRange)); + &impl_m.explicit_self))[]); return; } (_, &ty::StaticExplicitSelfCategory) => { tcx.sess.span_err( impl_m_span, - format!("method `{}` has a `{}` declaration in the trait, \ + &format!("method `{}` has a `{}` declaration in the trait, \ but not in the impl", token::get_name(trait_m.name), ppaux::explicit_self_category_to_str( - &trait_m.explicit_self)).index(&FullRange)); + &trait_m.explicit_self))[]); return; } _ => { @@ -1358,9 +1358,9 @@ fn 
check_region_bounds_on_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, if trait_params.len() != impl_params.len() { tcx.sess.span_err( span, - format!("lifetime parameters or bounds on method `{}` do \ + &format!("lifetime parameters or bounds on method `{}` do \ not match the trait declaration", - token::get_name(impl_m.name)).index(&FullRange)); + token::get_name(impl_m.name))[]); return false; } @@ -1406,13 +1406,13 @@ fn check_region_bounds_on_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, let err = if missing.len() != 0 || extra.len() != 0 { tcx.sess.span_err( span, - format!( + &format!( "the lifetime parameter `{}` declared in the impl \ has a distinct set of bounds \ from its counterpart `{}` \ declared in the trait", impl_param.name.user_string(tcx), - trait_param.name.user_string(tcx)).index(&FullRange)); + trait_param.name.user_string(tcx))[]); true } else { false @@ -1421,15 +1421,15 @@ fn check_region_bounds_on_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, if missing.len() != 0 { tcx.sess.span_note( span, - format!("the impl is missing the following bounds: `{}`", - missing.user_string(tcx)).index(&FullRange)); + &format!("the impl is missing the following bounds: `{}`", + missing.user_string(tcx))[]); } if extra.len() != 0 { tcx.sess.span_note( span, - format!("the impl has the following extra bounds: `{}`", - extra.user_string(tcx)).index(&FullRange)); + &format!("the impl has the following extra bounds: `{}`", + extra.user_string(tcx))[]); } if err { @@ -1699,8 +1699,8 @@ pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> { None => { self.tcx().sess.span_bug( span, - format!("no type for local variable {}", - nid).index(&FullRange)); + &format!("no type for local variable {}", + nid)[]); } } } @@ -2033,8 +2033,8 @@ pub fn expr_ty(&self, ex: &ast::Expr) -> Ty<'tcx> { match self.inh.node_types.borrow().get(&ex.id) { Some(&t) => t, None => { - self.tcx().sess.bug(format!("no type for expr in fcx {}", - self.tag()).index(&FullRange)); + self.tcx().sess.bug(&format!("no type for expr in fcx {}", + self.tag())[]); } } } @@ -2062,9 +2062,9 @@ pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> { Some(&t) => t, None => { self.tcx().sess.bug( - format!("no type for node {}: {} in fcx {}", + &format!("no type for node {}: {} in fcx {}", id, self.tcx().map.node_to_string(id), - self.tag()).index(&FullRange)); + self.tag())[]); } } } @@ -2466,7 +2466,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, Ok(trait_did) => trait_did, Err(ref err_string) => { fcx.tcx().sess.span_err(iterator_expr.span, - err_string.index(&FullRange)); + &err_string[]); return fcx.tcx().types.err } }; @@ -2490,10 +2490,10 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, if !ty::type_is_error(true_expr_type) { let ty_string = fcx.infcx().ty_to_string(true_expr_type); fcx.tcx().sess.span_err(iterator_expr.span, - format!("`for` loop expression has type `{}` which does \ + &format!("`for` loop expression has type `{}` which does \ not implement the `Iterator` trait; \ maybe try .iter()", - ty_string).index(&FullRange)); + ty_string)[]); } fcx.tcx().types.err } @@ -2528,10 +2528,10 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, } _ => { fcx.tcx().sess.span_err(iterator_expr.span, - format!("`next` method of the `Iterator` \ + &format!("`next` method of the `Iterator` \ trait has an unexpected type `{}`", fcx.infcx().ty_to_string(return_type)) - .index(&FullRange)); + []); fcx.tcx().types.err } } @@ -2558,7 +2558,7 @@ fn check_method_argument_types<'a, 'tcx>(fcx: 
&FnCtxt<'a, 'tcx>, check_argument_types(fcx, sp, - err_inputs.index(&FullRange), + &err_inputs[], args_no_rcvr, autoref_args, false, @@ -3010,7 +3010,7 @@ fn check_method_call(fcx: &FnCtxt, }; // Call the generic checker. - let args: Vec<_> = args.index(&(1..)).iter().map(|x| x).collect(); + let args: Vec<_> = args[1..].iter().map(|x| x).collect(); let ret_ty = check_method_argument_types(fcx, method_name.span, fn_ty, @@ -3328,7 +3328,7 @@ fn check_field(fcx: &FnCtxt, ty::ty_struct(base_id, substs) => { debug!("struct named {}", ppaux::ty_to_string(tcx, base_t)); let fields = ty::lookup_struct_fields(tcx, base_id); - lookup_field_ty(tcx, base_id, fields.index(&FullRange), + lookup_field_ty(tcx, base_id, &fields[], field.node.name, &(*substs)) } _ => None @@ -3391,7 +3391,7 @@ fn check_tup_field(fcx: &FnCtxt, if tuple_like { debug!("tuple struct named {}", ppaux::ty_to_string(tcx, base_t)); let fields = ty::lookup_struct_fields(tcx, base_id); - lookup_tup_field_ty(tcx, base_id, fields.index(&FullRange), + lookup_tup_field_ty(tcx, base_id, &fields[], idx.node, &(*substs)) } else { None @@ -3556,7 +3556,7 @@ fn check_struct_constructor(fcx: &FnCtxt, class_id, id, fcx.ccx.tcx.mk_substs(struct_substs), - class_fields.index(&FullRange), + &class_fields[], fields, base_expr.is_none(), None); @@ -3599,7 +3599,7 @@ fn check_struct_enum_variant(fcx: &FnCtxt, variant_id, id, fcx.ccx.tcx.mk_substs(substitutions), - variant_fields.index(&FullRange), + &variant_fields[], fields, true, Some(enum_id)); @@ -4066,7 +4066,7 @@ fn check_struct_fields_on_error(fcx: &FnCtxt, let expected = expected.only_has_type(); let flds = expected.map_to_option(fcx, |ty| { match ty.sty { - ty::ty_tup(ref flds) => Some(flds.index(&FullRange)), + ty::ty_tup(ref flds) => Some(&flds[]), _ => None } }); @@ -4100,7 +4100,7 @@ fn check_struct_fields_on_error(fcx: &FnCtxt, let struct_id = match def { Some(def::DefVariant(enum_id, variant_id, true)) => { check_struct_enum_variant(fcx, id, expr.span, enum_id, - variant_id, fields.index(&FullRange)); + variant_id, &fields[]); enum_id } Some(def::DefTrait(def_id)) => { @@ -4109,7 +4109,7 @@ fn check_struct_fields_on_error(fcx: &FnCtxt, pprust::path_to_string(path)); check_struct_fields_on_error(fcx, id, - fields.index(&FullRange), + &fields[], base_expr); def_id }, @@ -4122,7 +4122,7 @@ fn check_struct_fields_on_error(fcx: &FnCtxt, id, expr.span, struct_did, - fields.index(&FullRange), + &fields[], base_expr.as_ref().map(|e| &**e)); } _ => { @@ -4131,7 +4131,7 @@ fn check_struct_fields_on_error(fcx: &FnCtxt, pprust::path_to_string(path)); check_struct_fields_on_error(fcx, id, - fields.index(&FullRange), + &fields[], base_expr); } } @@ -4164,7 +4164,7 @@ fn check_struct_fields_on_error(fcx: &FnCtxt, fcx.tcx() .sess .span_err(path.span, - format!("structure constructor specifies a \ + &format!("structure constructor specifies a \ structure of type `{}`, but this \ structure has type `{}`: {}", fcx.infcx() @@ -4172,7 +4172,7 @@ fn check_struct_fields_on_error(fcx: &FnCtxt, fcx.infcx() .ty_to_string( actual_structure_type), - type_error_description).index(&FullRange)); + type_error_description)[]); ty::note_and_explain_type_err(tcx, &type_error); } } @@ -4847,7 +4847,7 @@ fn do_check<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, } let hint = *ty::lookup_repr_hints(ccx.tcx, ast::DefId { krate: ast::LOCAL_CRATE, node: id }) - .index(&FullRange).get(0).unwrap_or(&attr::ReprAny); + [].get(0).unwrap_or(&attr::ReprAny); if hint != attr::ReprAny && vs.len() <= 1 { if vs.len() == 1 { @@ -5518,7 +5518,7 @@ 
fn param<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, n: u32) -> Ty<'tcx> { "get_tydesc" => { let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) { Ok(t) => t, - Err(s) => { tcx.sess.span_fatal(it.span, s.index(&FullRange)); } + Err(s) => { tcx.sess.span_fatal(it.span, &s[]); } }; let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt { ty: tydesc_ty, @@ -5534,7 +5534,7 @@ fn param<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, n: u32) -> Ty<'tcx> { ty::mk_struct(ccx.tcx, did, ccx.tcx.mk_substs(subst::Substs::empty()))), Err(msg) => { - tcx.sess.span_fatal(it.span, msg.index(&FullRange)); + tcx.sess.span_fatal(it.span, &msg[]); } } }, diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index 11ad7bcb091..112e0053642 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -188,8 +188,8 @@ fn region_of_def(fcx: &FnCtxt, def: def::Def) -> ty::Region { } } _ => { - tcx.sess.bug(format!("unexpected def in region_of_def: {:?}", - def).index(&FullRange)) + tcx.sess.bug(&format!("unexpected def in region_of_def: {:?}", + def)[]) } } } @@ -282,13 +282,13 @@ fn visit_fn_body(&mut self, Some(f) => f, None => { self.tcx().sess.bug( - format!("No fn-sig entry for id={}", id).index(&FullRange)); + &format!("No fn-sig entry for id={}", id)[]); } }; let len = self.region_bound_pairs.len(); - self.relate_free_regions(fn_sig.index(&FullRange), body.id); - link_fn_args(self, CodeExtent::from_node_id(body.id), fn_decl.inputs.index(&FullRange)); + self.relate_free_regions(&fn_sig[], body.id); + link_fn_args(self, CodeExtent::from_node_id(body.id), &fn_decl.inputs[]); self.visit_block(body); self.visit_region_obligations(body.id); self.region_bound_pairs.truncate(len); @@ -629,7 +629,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { } ast::ExprMatch(ref discr, ref arms, _) => { - link_match(rcx, &**discr, arms.index(&FullRange)); + link_match(rcx, &**discr, &arms[]); visit::walk_expr(rcx, expr); } @@ -953,8 +953,8 @@ fn constrain_autoderefs<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>, let (m, r) = match self_ty.sty { ty::ty_rptr(r, ref m) => (m.mutbl, r), _ => rcx.tcx().sess.span_bug(deref_expr.span, - format!("bad overloaded deref type {}", - method.ty.repr(rcx.tcx())).index(&FullRange)) + &format!("bad overloaded deref type {}", + method.ty.repr(rcx.tcx()))[]) }; { let mc = mc::MemCategorizationContext::new(rcx.fcx); @@ -1318,9 +1318,9 @@ fn link_reborrowed_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, None => { rcx.tcx().sess.span_bug( span, - format!("Illegal upvar id: {}", + &format!("Illegal upvar id: {}", upvar_id.repr( - rcx.tcx())).index(&FullRange)); + rcx.tcx()))[]); } } } diff --git a/src/librustc_typeck/check/regionmanip.rs b/src/librustc_typeck/check/regionmanip.rs index 84d94b0392e..8730858f66e 100644 --- a/src/librustc_typeck/check/regionmanip.rs +++ b/src/librustc_typeck/check/regionmanip.rs @@ -146,8 +146,8 @@ fn accumulate_from_ty(&mut self, ty: Ty<'tcx>) { ty::ty_open(_) => { self.tcx.sess.bug( - format!("Unexpected type encountered while doing wf check: {}", - ty.repr(self.tcx)).index(&FullRange)); + &format!("Unexpected type encountered while doing wf check: {}", + ty.repr(self.tcx))[]); } } } diff --git a/src/librustc_typeck/check/vtable.rs b/src/librustc_typeck/check/vtable.rs index e302609bf22..3940092eb72 100644 --- a/src/librustc_typeck/check/vtable.rs +++ b/src/librustc_typeck/check/vtable.rs @@ -72,17 +72,17 @@ pub fn check_object_cast<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, (_, &ty::ty_uniq(..)) => { fcx.ccx.tcx.sess.span_err( source_expr.span, - 
format!("can only cast an boxed pointer \ + &format!("can only cast an boxed pointer \ to a boxed object, not a {}", - ty::ty_sort_string(fcx.tcx(), source_ty)).index(&FullRange)); + ty::ty_sort_string(fcx.tcx(), source_ty))[]); } (_, &ty::ty_rptr(..)) => { fcx.ccx.tcx.sess.span_err( source_expr.span, - format!("can only cast a &-pointer \ + &format!("can only cast a &-pointer \ to an &-object, not a {}", - ty::ty_sort_string(fcx.tcx(), source_ty)).index(&FullRange)); + ty::ty_sort_string(fcx.tcx(), source_ty))[]); } _ => { diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index a0f30788bbd..cb52795670f 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -80,8 +80,8 @@ fn get_base_type_def_id<'a, 'tcx>(inference_context: &InferCtxt<'a, 'tcx>, // that the user can type inference_context.tcx.sess.span_bug( span, - format!("coherence encountered unexpected type searching for base type: {}", - ty.repr(inference_context.tcx)).index(&FullRange)); + &format!("coherence encountered unexpected type searching for base type: {}", + ty.repr(inference_context.tcx))[]); } } } @@ -487,18 +487,18 @@ fn check_implementations_of_copy(&self) { Err(ty::FieldDoesNotImplementCopy(name)) => { tcx.sess .span_err(span, - format!("the trait `Copy` may not be \ + &format!("the trait `Copy` may not be \ implemented for this type; field \ `{}` does not implement `Copy`", - token::get_name(name)).index(&FullRange)) + token::get_name(name))[]) } Err(ty::VariantDoesNotImplementCopy(name)) => { tcx.sess .span_err(span, - format!("the trait `Copy` may not be \ + &format!("the trait `Copy` may not be \ implemented for this type; variant \ `{}` does not implement `Copy`", - token::get_name(name)).index(&FullRange)) + token::get_name(name))[]) } Err(ty::TypeIsStructural) => { tcx.sess diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 79e98f15a2d..5902936ae30 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -212,7 +212,7 @@ fn get_enum_variant_types<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ast::TupleVariantKind(ref args) if args.len() > 0 => { let rs = ExplicitRscope; let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, &*va.ty)).collect(); - ty::mk_ctor_fn(tcx, variant_def_id, input_tys.index(&FullRange), enum_ty) + ty::mk_ctor_fn(tcx, variant_def_id, &input_tys[], enum_ty) } ast::TupleVariantKind(_) => { @@ -259,7 +259,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ccx, trait_id, &trait_def.generics, - trait_items.index(&FullRange), + &trait_items[], &m.id, &m.ident.name, &m.explicit_self, @@ -273,7 +273,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ccx, trait_id, &trait_def.generics, - trait_items.index(&FullRange), + &trait_items[], &m.id, &m.pe_ident().name, m.pe_explicit_self(), @@ -779,7 +779,7 @@ fn convert_struct<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, local_def(field.node.id)].ty).collect(); let ctor_fn_ty = ty::mk_ctor_fn(tcx, local_def(ctor_id), - inputs.index(&FullRange), + &inputs[], selfty); write_ty_to_tcx(tcx, ctor_id, ctor_fn_ty); tcx.tcache.borrow_mut().insert(local_def(ctor_id), @@ -819,8 +819,8 @@ fn get_trait_def<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, match ccx.tcx.map.get(trait_id.node) { ast_map::NodeItem(item) => trait_def_of_item(ccx, &*item), _ => { - ccx.tcx.sess.bug(format!("get_trait_def({}): not an item", - trait_id.node).index(&FullRange)) + ccx.tcx.sess.bug(&format!("get_trait_def({}): not an 
item", + trait_id.node)[]) } } } @@ -845,7 +845,7 @@ fn trait_def_of_item<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ref s => { tcx.sess.span_bug( it.span, - format!("trait_def_of_item invoked on {:?}", s).index(&FullRange)); + &format!("trait_def_of_item invoked on {:?}", s)[]); } }; @@ -1030,8 +1030,8 @@ fn ty_generics_for_type_or_impl<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, -> ty::Generics<'tcx> { ty_generics(ccx, subst::TypeSpace, - generics.lifetimes.index(&FullRange), - generics.ty_params.index(&FullRange), + &generics.lifetimes[], + &generics.ty_params[], ty::Generics::empty(), &generics.where_clause) } @@ -1049,8 +1049,8 @@ fn ty_generics_for_trait<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, let mut generics = ty_generics(ccx, subst::TypeSpace, - ast_generics.lifetimes.index(&FullRange), - ast_generics.ty_params.index(&FullRange), + &ast_generics.lifetimes[], + &ast_generics.ty_params[], ty::Generics::empty(), &ast_generics.where_clause); @@ -1135,8 +1135,8 @@ fn ty_generics_for_fn_or_method<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, let early_lifetimes = resolve_lifetime::early_bound_lifetimes(generics); ty_generics(ccx, subst::FnSpace, - early_lifetimes.index(&FullRange), - generics.ty_params.index(&FullRange), + &early_lifetimes[], + &generics.ty_params[], base_generics, &generics.where_clause) } @@ -1323,7 +1323,7 @@ fn get_or_create_type_parameter_def<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, let param_ty = ty::ParamTy::new(space, index, param.ident.name); let bounds = compute_bounds(ccx, param_ty.to_ty(ccx.tcx), - param.bounds.index(&FullRange), + ¶m.bounds[], SizedByDefault::Yes, param.span); let default = match param.default { @@ -1404,7 +1404,7 @@ fn check_bounds_compatible<'tcx>(tcx: &ty::ctxt<'tcx>, if !param_bounds.builtin_bounds.contains(&ty::BoundSized) { ty::each_bound_trait_and_supertraits( tcx, - param_bounds.trait_bounds.index(&FullRange), + ¶m_bounds.trait_bounds[], |trait_ref| { let trait_def = ty::lookup_trait_def(tcx, trait_ref.def_id()); if trait_def.bounds.builtin_bounds.contains(&ty::BoundSized) { diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index ae8731dfa47..800af1ab4ec 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -190,10 +190,10 @@ fn require_same_types<'a, 'tcx, M>(tcx: &ty::ctxt<'tcx>, Ok(_) => true, Err(ref terr) => { tcx.sess.span_err(span, - format!("{}: {}", + &format!("{}: {}", msg(), ty::type_err_to_str(tcx, - terr)).index(&FullRange)); + terr))[]); ty::note_and_explain_type_err(tcx, terr); false } @@ -239,10 +239,10 @@ fn check_main_fn_ty(ccx: &CrateCtxt, } _ => { tcx.sess.span_bug(main_span, - format!("main has a non-function type: found \ + &format!("main has a non-function type: found \ `{}`", ppaux::ty_to_string(tcx, - main_t)).index(&FullRange)); + main_t))[]); } } } @@ -291,9 +291,9 @@ fn check_start_fn_ty(ccx: &CrateCtxt, } _ => { tcx.sess.span_bug(start_span, - format!("start has a non-function type: found \ + &format!("start has a non-function type: found \ `{}`", - ppaux::ty_to_string(tcx, start_t)).index(&FullRange)); + ppaux::ty_to_string(tcx, start_t))[]); } } } diff --git a/src/librustc_typeck/variance.rs b/src/librustc_typeck/variance.rs index 393ba19ba18..b33921e07e8 100644 --- a/src/librustc_typeck/variance.rs +++ b/src/librustc_typeck/variance.rs @@ -562,9 +562,9 @@ fn inferred_index(&self, param_id: ast::NodeId) -> InferredIndex { match self.terms_cx.inferred_map.get(¶m_id) { Some(&index) => index, None => { - self.tcx().sess.bug(format!( + self.tcx().sess.bug(&format!( "no inferred index 
entry for {}", - self.tcx().map.node_to_string(param_id)).index(&FullRange)); + self.tcx().map.node_to_string(param_id))[]); } } } @@ -837,9 +837,9 @@ fn add_constraints_from_ty(&mut self, ty::ty_infer(..) | ty::ty_err => { self.tcx().sess.bug( - format!("unexpected type encountered in \ + &format!("unexpected type encountered in \ variance inference: {}", - ty.repr(self.tcx())).index(&FullRange)); + ty.repr(self.tcx()))[]); } } } @@ -917,9 +917,9 @@ fn add_constraints_from_region(&mut self, // regions when visiting member types or method types. self.tcx() .sess - .bug(format!("unexpected region encountered in variance \ + .bug(&format!("unexpected region encountered in variance \ inference: {}", - region.repr(self.tcx())).index(&FullRange)); + region.repr(self.tcx()))[]); } } } @@ -1055,7 +1055,7 @@ fn write(&self) { // attribute and report an error with various results if found. if ty::has_attr(tcx, item_def_id, "rustc_variance") { let found = item_variances.repr(tcx); - tcx.sess.span_err(tcx.map.span(item_id), found.index(&FullRange)); + tcx.sess.span_err(tcx.map.span(item_id), &found[]); } let newly_added = tcx.item_variance_map.borrow_mut() diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index bf2664bba6a..35ae562a475 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -798,7 +798,7 @@ fn clean(&self, cx: &DocContext) -> Item { let all_inputs = &self.pe_fn_decl().inputs; let inputs = match self.pe_explicit_self().node { ast::SelfStatic => all_inputs.as_slice(), - _ => all_inputs.index(&(1..)) + _ => &all_inputs[1..] }; let decl = FnDecl { inputs: Arguments { @@ -836,7 +836,7 @@ impl Clean for ast::TypeMethod { fn clean(&self, cx: &DocContext) -> Item { let inputs = match self.explicit_self.node { ast::SelfStatic => self.decl.inputs.as_slice(), - _ => self.decl.inputs.index(&(1..)) + _ => &self.decl.inputs[1..] }; let decl = FnDecl { inputs: Arguments { @@ -1132,7 +1132,7 @@ fn clean(&self, cx: &DocContext) -> Item { self.fty.sig.clone()), s => { let sig = ty::Binder(ty::FnSig { - inputs: self.fty.sig.0.inputs.index(&(1..)).to_vec(), + inputs: self.fty.sig.0.inputs[1..].to_vec(), ..self.fty.sig.0.clone() }); let s = match s { diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index c7ec687bc1a..23c1b1523d5 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -311,7 +311,7 @@ fn path(w: &mut fmt::Formatter, match rel_root { Some(root) => { let mut root = String::from_str(root.as_slice()); - for seg in path.segments.index(&(0..amt)).iter() { + for seg in path.segments[0..amt].iter() { if "super" == seg.name || "self" == seg.name { try!(write!(w, "{}::", seg.name)); @@ -326,7 +326,7 @@ fn path(w: &mut fmt::Formatter, } } None => { - for seg in path.segments.index(&(0..amt)).iter() { + for seg in path.segments[0..amt].iter() { try!(write!(w, "{}::", seg.name)); } } diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 3d2c5e2cbb5..885017152de 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -34,7 +34,7 @@ pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String { class, id, &mut out).unwrap(); - String::from_utf8_lossy(out.index(&FullRange)).into_owned() + String::from_utf8_lossy(&out[]).into_owned() } /// Exhausts the `lexer` writing the output into `out`. 
diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index fd0b5c55903..2677b96bfb9 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -383,7 +383,7 @@ fn escape_str(wr: &mut fmt::Writer, v: &str) -> fmt::Result { }; if start < i { - try!(wr.write_str(v.index(&(start..i)))); + try!(wr.write_str(&v[start..i])); } try!(wr.write_str(escaped)); @@ -392,7 +392,7 @@ fn escape_str(wr: &mut fmt::Writer, v: &str) -> fmt::Result { } if start != v.len() { - try!(wr.write_str(v.index(&(start..)))); + try!(wr.write_str(&v[start..])); } wr.write_str("\"") @@ -401,7 +401,7 @@ fn escape_str(wr: &mut fmt::Writer, v: &str) -> fmt::Result { fn escape_char(writer: &mut fmt::Writer, v: char) -> fmt::Result { let mut buf = [0; 4]; let n = v.encode_utf8(&mut buf).unwrap(); - let buf = unsafe { str::from_utf8_unchecked(buf.index(&(0..n))) }; + let buf = unsafe { str::from_utf8_unchecked(&buf[0..n]) }; escape_str(writer, buf) } @@ -414,7 +414,7 @@ fn spaces(wr: &mut fmt::Writer, mut n: uint) -> fmt::Result { } if n > 0 { - wr.write_str(BUF.index(&(0..n))) + wr.write_str(&BUF[0..n]) } else { Ok(()) } @@ -623,7 +623,7 @@ fn emit_map_elt_key(&mut self, idx: uint, mut f: F) -> EncodeResult where let mut check_encoder = Encoder::new(&mut buf); try!(f(transmute(&mut check_encoder))); } - let out = str::from_utf8(buf.index(&FullRange)).unwrap(); + let out = str::from_utf8(&buf[]).unwrap(); let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"'; if needs_wrapping { try!(write!(self.writer, "\"")); } try!(f(self)); @@ -894,7 +894,7 @@ fn emit_map_elt_key(&mut self, idx: uint, mut f: F) -> EncodeResult where let mut check_encoder = PrettyEncoder::new(&mut buf); try!(f(transmute(&mut check_encoder))); } - let out = str::from_utf8(buf.index(&FullRange)).unwrap(); + let out = str::from_utf8(&buf[]).unwrap(); let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"'; if needs_wrapping { try!(write!(self.writer, "\"")); } try!(f(self)); @@ -1027,7 +1027,7 @@ pub fn is_string<'a>(&'a self) -> bool { /// Returns None otherwise. pub fn as_string<'a>(&'a self) -> Option<&'a str> { match *self { - Json::String(ref s) => Some(s.index(&FullRange)), + Json::String(ref s) => Some(&s[]), _ => None } } @@ -1137,7 +1137,7 @@ impl Index for Json { fn index<'a>(&'a self, idx: &uint) -> &'a Json { match self { - &Json::Array(ref v) => v.index(idx), + &Json::Array(ref v) => &v[*idx], _ => panic!("can only index Json with uint if it is an array") } } @@ -1222,7 +1222,7 @@ pub fn get<'l>(&'l self, idx: uint) -> StackElement<'l> { InternalIndex(i) => StackElement::Index(i), InternalKey(start, size) => { StackElement::Key(str::from_utf8( - self.str_buffer.index(&((start as uint) .. (start as uint + size as uint)))) + &self.str_buffer[(start as uint) .. (start as uint + size as uint)]) .unwrap()) } } @@ -1265,7 +1265,7 @@ pub fn top<'l>(&'l self) -> Option> { Some(&InternalIndex(i)) => Some(StackElement::Index(i)), Some(&InternalKey(start, size)) => { Some(StackElement::Key(str::from_utf8( - self.str_buffer.index(&((start as uint) .. (start+size) as uint)) + &self.str_buffer[(start as uint) .. 
(start+size) as uint] ).unwrap())) } } @@ -2144,7 +2144,7 @@ fn read_enum_variant(&mut self, names: &[&str], return Err(ExpectedError("String or Object".to_string(), format!("{}", json))) } }; - let idx = match names.iter().position(|n| *n == name.index(&FullRange)) { + let idx = match names.iter().position(|n| *n == &name[]) { Some(idx) => idx, None => return Err(UnknownVariantError(name)) }; @@ -2511,7 +2511,6 @@ mod tests { use std::{i64, u64, f32, f64, io}; use std::collections::BTreeMap; use std::num::Float; - use std::ops::Index; use std::string; #[derive(RustcDecodable, Eq, PartialEq, Show)] @@ -3353,7 +3352,7 @@ fn test_encode_hashmap_with_numeric_key() { hm.insert(1, true); let mut mem_buf = Vec::new(); write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); - let json_str = from_utf8(mem_buf.index(&FullRange)).unwrap(); + let json_str = from_utf8(&mem_buf[]).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {:?}", json_str), _ => {} // it parsed and we are good to go @@ -3369,7 +3368,7 @@ fn test_prettyencode_hashmap_with_numeric_key() { hm.insert(1, true); let mut mem_buf = Vec::new(); write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); - let json_str = from_utf8(mem_buf.index(&FullRange)).unwrap(); + let json_str = from_utf8(&mem_buf[]).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {:?}", json_str), _ => {} // it parsed and we are good to go @@ -3409,7 +3408,7 @@ fn indents(source: &str) -> uint { write!(&mut writer, "{}", super::as_pretty_json(&json).indent(i)).unwrap(); - let printed = from_utf8(writer.index(&FullRange)).unwrap(); + let printed = from_utf8(&writer[]).unwrap(); // Check for indents at each line let lines: Vec<&str> = printed.lines().collect(); diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs index 7b6ca10669d..fe2d57486a8 100644 --- a/src/libserialize/serialize.rs +++ b/src/libserialize/serialize.rs @@ -326,7 +326,7 @@ fn encode(&self, s: &mut S) -> Result<(), S::Error> { impl Encodable for String { fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(self.index(&FullRange)) + s.emit_str(&self[]) } } diff --git a/src/libstd/failure.rs b/src/libstd/failure.rs index 50538d3e43d..dbc88ddf0a0 100644 --- a/src/libstd/failure.rs +++ b/src/libstd/failure.rs @@ -37,7 +37,7 @@ pub fn on_fail(obj: &(Any+Send), file: &'static str, line: uint) { let msg = match obj.downcast_ref::<&'static str>() { Some(s) => *s, None => match obj.downcast_ref::() { - Some(s) => s.index(&FullRange), + Some(s) => &s[], None => "Box", } }; diff --git a/src/libstd/io/buffered.rs b/src/libstd/io/buffered.rs index 74c503e6f2b..ba13bd05dc5 100644 --- a/src/libstd/io/buffered.rs +++ b/src/libstd/io/buffered.rs @@ -15,7 +15,7 @@ use cmp; use io::{Reader, Writer, Stream, Buffer, DEFAULT_BUF_SIZE, IoResult}; use iter::{IteratorExt, ExactSizeIterator}; -use ops::{Drop, Index}; +use ops::Drop; use option::Option; use option::Option::{Some, None}; use result::Result::Ok; @@ -97,7 +97,7 @@ fn fill_buf<'a>(&'a mut self) -> IoResult<&'a [u8]> { self.cap = try!(self.inner.read(self.buf.as_mut_slice())); self.pos = 0; } - Ok(self.buf.index(&(self.pos..self.cap))) + Ok(&self.buf[self.pos..self.cap]) } fn consume(&mut self, amt: uint) { @@ -114,7 +114,7 @@ fn read(&mut self, buf: &mut [u8]) -> IoResult { let nread = { let available = try!(self.fill_buf()); let nread = cmp::min(available.len(), buf.len()); - slice::bytes::copy_memory(buf, available.index(&(0..nread))); + 
slice::bytes::copy_memory(buf, &available[0..nread]); nread }; self.pos += nread; @@ -168,7 +168,7 @@ pub fn new(inner: W) -> BufferedWriter { fn flush_buf(&mut self) -> IoResult<()> { if self.pos != 0 { - let ret = self.inner.as_mut().unwrap().write(self.buf.index(&(0..self.pos))); + let ret = self.inner.as_mut().unwrap().write(&self.buf[0..self.pos]); self.pos = 0; ret } else { @@ -260,9 +260,9 @@ impl Writer for LineBufferedWriter { fn write(&mut self, buf: &[u8]) -> IoResult<()> { match buf.iter().rposition(|&b| b == b'\n') { Some(i) => { - try!(self.inner.write(buf.index(&(0..(i + 1))))); + try!(self.inner.write(&buf[0..(i + 1)])); try!(self.inner.flush()); - try!(self.inner.write(buf.index(&((i + 1)..)))); + try!(self.inner.write(&buf[(i + 1)..])); Ok(()) } None => self.inner.write(buf), @@ -510,7 +510,7 @@ fn test_buffered_writer_inner_flushes() { assert_eq!(a, &w.get_ref()[]); let w = w.into_inner(); let a: &[_] = &[0, 1]; - assert_eq!(a, w.index(&FullRange)); + assert_eq!(a, &w[]); } // This is just here to make sure that we don't infinite loop in the @@ -607,14 +607,14 @@ fn test_short_reads() { #[test] fn read_char_buffered() { let buf = [195u8, 159u8]; - let mut reader = BufferedReader::with_capacity(1, buf.index(&FullRange)); + let mut reader = BufferedReader::with_capacity(1, &buf[]); assert_eq!(reader.read_char(), Ok('ß')); } #[test] fn test_chars() { let buf = [195u8, 159u8, b'a']; - let mut reader = BufferedReader::with_capacity(1, buf.index(&FullRange)); + let mut reader = BufferedReader::with_capacity(1, &buf[]); let mut it = reader.chars(); assert_eq!(it.next(), Some(Ok('ß'))); assert_eq!(it.next(), Some(Ok('a'))); diff --git a/src/libstd/io/comm_adapters.rs b/src/libstd/io/comm_adapters.rs index bce097e17ef..b578f4d5adc 100644 --- a/src/libstd/io/comm_adapters.rs +++ b/src/libstd/io/comm_adapters.rs @@ -13,7 +13,6 @@ use sync::mpsc::{Sender, Receiver}; use io; use option::Option::{None, Some}; -use ops::Index; use result::Result::{Ok, Err}; use slice::{bytes, SliceExt}; use super::{Buffer, Reader, Writer, IoResult}; @@ -91,7 +90,7 @@ fn read(&mut self, buf: &mut [u8]) -> IoResult { Some(src) => { let dst = buf.slice_from_mut(num_read); let count = cmp::min(src.len(), dst.len()); - bytes::copy_memory(dst, src.index(&(0..count))); + bytes::copy_memory(dst, &src[0..count]); count }, None => 0, diff --git a/src/libstd/io/fs.rs b/src/libstd/io/fs.rs index eadca8e42e5..dbccc81c4cc 100644 --- a/src/libstd/io/fs.rs +++ b/src/libstd/io/fs.rs @@ -889,7 +889,7 @@ fn file_test_io_smoke_test() { let mut read_buf = [0; 1028]; let read_str = match check!(read_stream.read(&mut read_buf)) { -1|0 => panic!("shouldn't happen"), - n => str::from_utf8(read_buf.index(&(0..n))).unwrap().to_string() + n => str::from_utf8(&read_buf[0..n]).unwrap().to_string() }; assert_eq!(read_str.as_slice(), message); } diff --git a/src/libstd/io/mem.rs b/src/libstd/io/mem.rs index 9a6ad04fdbc..c5e289398e0 100644 --- a/src/libstd/io/mem.rs +++ b/src/libstd/io/mem.rs @@ -13,7 +13,6 @@ //! Readers and Writers for in-memory buffers use cmp::min; -use ops::Index; use option::Option::None; use result::Result::{Err, Ok}; use io; @@ -160,7 +159,7 @@ fn read(&mut self, buf: &mut [u8]) -> IoResult { let write_len = min(buf.len(), self.buf.len() - self.pos); { - let input = self.buf.index(&(self.pos.. (self.pos + write_len))); + let input = &self.buf[self.pos.. 
(self.pos + write_len)]; let output = buf.slice_to_mut(write_len); assert_eq!(input.len(), output.len()); slice::bytes::copy_memory(output, input); @@ -188,7 +187,7 @@ impl Buffer for MemReader { #[inline] fn fill_buf<'a>(&'a mut self) -> IoResult<&'a [u8]> { if self.pos < self.buf.len() { - Ok(self.buf.index(&(self.pos..))) + Ok(&self.buf[self.pos..]) } else { Err(io::standard_error(io::EndOfFile)) } @@ -205,7 +204,7 @@ fn read(&mut self, buf: &mut [u8]) -> IoResult { let write_len = min(buf.len(), self.len()); { - let input = self.index(&(0..write_len)); + let input = &self[0..write_len]; let output = buf.slice_to_mut(write_len); slice::bytes::copy_memory(output, input); } @@ -228,7 +227,7 @@ fn fill_buf(&mut self) -> IoResult<&[u8]> { #[inline] fn consume(&mut self, amt: uint) { - *self = self.index(&(amt..)); + *self = &self[amt..]; } } @@ -287,7 +286,7 @@ fn write(&mut self, src: &[u8]) -> IoResult<()> { Ok(()) } else { - slice::bytes::copy_memory(dst, src.index(&(0..dst_len))); + slice::bytes::copy_memory(dst, &src[0..dst_len]); self.pos += dst_len; @@ -350,7 +349,7 @@ fn read(&mut self, buf: &mut [u8]) -> IoResult { let write_len = min(buf.len(), self.buf.len() - self.pos); { - let input = self.buf.index(&(self.pos.. (self.pos + write_len))); + let input = &self.buf[self.pos.. (self.pos + write_len)]; let output = buf.slice_to_mut(write_len); assert_eq!(input.len(), output.len()); slice::bytes::copy_memory(output, input); @@ -378,7 +377,7 @@ impl<'a> Buffer for BufReader<'a> { #[inline] fn fill_buf(&mut self) -> IoResult<&[u8]> { if self.pos < self.buf.len() { - Ok(self.buf.index(&(self.pos..))) + Ok(&self.buf[self.pos..]) } else { Err(io::standard_error(io::EndOfFile)) } @@ -393,7 +392,7 @@ mod test { extern crate "test" as test_crate; use io::{SeekSet, SeekCur, SeekEnd, Reader, Writer, Seek}; use prelude::v1::{Ok, Err, range, Vec, Buffer, AsSlice, SliceExt}; - use prelude::v1::{IteratorExt, Index}; + use prelude::v1::IteratorExt; use io; use iter::repeat; use self::test_crate::Bencher; @@ -499,7 +498,7 @@ fn test_mem_reader() { assert_eq!(buf, b); assert_eq!(reader.read(&mut buf), Ok(3)); let b: &[_] = &[5, 6, 7]; - assert_eq!(buf.index(&(0..3)), b); + assert_eq!(&buf[0..3], b); assert!(reader.read(&mut buf).is_err()); let mut reader = MemReader::new(vec!(0, 1, 2, 3, 4, 5, 6, 7)); assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3)); @@ -525,7 +524,7 @@ fn test_slice_reader() { assert_eq!(buf.as_slice(), b); assert_eq!(reader.read(&mut buf), Ok(3)); let b: &[_] = &[5, 6, 7]; - assert_eq!(buf.index(&(0..3)), b); + assert_eq!(&buf[0..3], b); assert!(reader.read(&mut buf).is_err()); let mut reader = &mut in_buf.as_slice(); assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3)); @@ -552,7 +551,7 @@ fn test_buf_reader() { assert_eq!(buf, b); assert_eq!(reader.read(&mut buf), Ok(3)); let b: &[_] = &[5, 6, 7]; - assert_eq!(buf.index(&(0..3)), b); + assert_eq!(&buf[0..3], b); assert!(reader.read(&mut buf).is_err()); let mut reader = BufReader::new(in_buf.as_slice()); assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3)); diff --git a/src/libstd/io/mod.rs b/src/libstd/io/mod.rs index 2595a3c44a8..c71ab514695 100644 --- a/src/libstd/io/mod.rs +++ b/src/libstd/io/mod.rs @@ -236,7 +236,7 @@ use iter::{Iterator, IteratorExt}; use marker::Sized; use mem::transmute; -use ops::{FnOnce, Index}; +use ops::FnOnce; use option::Option; use option::Option::{Some, None}; use os; @@ -1069,7 +1069,7 @@ fn write_line(&mut self, s: &str) -> IoResult<()> { fn write_char(&mut self, c: char) 
-> IoResult<()> { let mut buf = [0u8; 4]; let n = c.encode_utf8(buf.as_mut_slice()).unwrap_or(0); - self.write(buf.index(&(0..n))) + self.write(&buf[0..n]) } /// Write the result of passing n through `int::to_str_bytes`. @@ -1454,7 +1454,7 @@ fn read_until(&mut self, byte: u8) -> IoResult> { }; match available.iter().position(|&b| b == byte) { Some(i) => { - res.push_all(available.index(&(0..(i + 1)))); + res.push_all(&available[0..(i + 1)]); used = i + 1; break } @@ -1493,7 +1493,7 @@ fn read_char(&mut self) -> IoResult { } } } - match str::from_utf8(buf.index(&(0..width))).ok() { + match str::from_utf8(&buf[0..width]).ok() { Some(s) => Ok(s.char_at(0)), None => Err(standard_error(InvalidInput)) } diff --git a/src/libstd/io/net/ip.rs b/src/libstd/io/net/ip.rs index b9f653f86c2..d09afea94dc 100644 --- a/src/libstd/io/net/ip.rs +++ b/src/libstd/io/net/ip.rs @@ -22,7 +22,7 @@ use io::{self, IoResult, IoError}; use io::net; use iter::{Iterator, IteratorExt}; -use ops::{FnOnce, FnMut, Index}; +use ops::{FnOnce, FnMut}; use option::Option; use option::Option::{None, Some}; use result::Result::{Ok, Err}; @@ -313,7 +313,7 @@ fn ipv6_addr_from_head_tail(head: &[u16], tail: &[u16]) -> IpAddr { let mut tail = [0u16; 8]; let (tail_size, _) = read_groups(self, &mut tail, 8 - head_size); - Some(ipv6_addr_from_head_tail(head.index(&(0..head_size)), tail.index(&(0..tail_size)))) + Some(ipv6_addr_from_head_tail(&head[0..head_size], &tail[0..tail_size])) } fn read_ipv6_addr(&mut self) -> Option { diff --git a/src/libstd/io/util.rs b/src/libstd/io/util.rs index c0254a3e7a2..5a7219495f5 100644 --- a/src/libstd/io/util.rs +++ b/src/libstd/io/util.rs @@ -59,7 +59,7 @@ fn read(&mut self, buf: &mut [u8]) -> io::IoResult { impl Buffer for LimitReader { fn fill_buf<'a>(&'a mut self) -> io::IoResult<&'a [u8]> { let amt = try!(self.inner.fill_buf()); - let buf = amt.index(&(0..cmp::min(amt.len(), self.limit))); + let buf = &amt[0..cmp::min(amt.len(), self.limit)]; if buf.len() == 0 { Err(io::standard_error(io::EndOfFile)) } else { @@ -220,7 +220,7 @@ pub fn into_inner(self) -> (R, W) { impl Reader for TeeReader { fn read(&mut self, buf: &mut [u8]) -> io::IoResult { self.reader.read(buf).and_then(|len| { - self.writer.write(buf.index_mut(&(0..len))).map(|()| len) + self.writer.write(&mut buf[0..len]).map(|()| len) }) } } @@ -234,7 +234,7 @@ pub fn copy(r: &mut R, w: &mut W) -> io::IoResult<()> { Err(ref e) if e.kind == io::EndOfFile => return Ok(()), Err(e) => return Err(e), }; - try!(w.write(buf.index(&(0..len)))); + try!(w.write(&buf[0..len])); } } diff --git a/src/libstd/path/mod.rs b/src/libstd/path/mod.rs index 581969e98fb..0d7c213cd96 100644 --- a/src/libstd/path/mod.rs +++ b/src/libstd/path/mod.rs @@ -68,7 +68,7 @@ use iter::IteratorExt; use option::Option; use option::Option::{None, Some}; -use ops::{FullRange, Index}; +use ops::FullRange; use str; use str::StrExt; use string::{String, CowString}; @@ -352,7 +352,7 @@ fn filestem<'a>(&'a self) -> Option<&'a [u8]> { match name.rposition_elem(&dot) { None | Some(0) => name, Some(1) if name == b".." => name, - Some(pos) => name.index(&(0..pos)) + Some(pos) => &name[0..pos] } }) } @@ -399,7 +399,7 @@ fn extension<'a>(&'a self) -> Option<&'a [u8]> { match name.rposition_elem(&dot) { None | Some(0) => None, Some(1) if name == b".." 
=> None, - Some(pos) => Some(name.index(&((pos+1)..))) + Some(pos) => Some(&name[(pos+1)..]) } } } @@ -475,7 +475,7 @@ fn set_extension(&mut self, extension: T) { let extlen = extension.container_as_bytes().len(); match (name.rposition_elem(&dot), extlen) { (None, 0) | (Some(0), 0) => None, - (Some(idx), 0) => Some(name.index(&(0..idx)).to_vec()), + (Some(idx), 0) => Some(name[0..idx].to_vec()), (idx, extlen) => { let idx = match idx { None | Some(0) => name.len(), @@ -484,7 +484,7 @@ fn set_extension(&mut self, extension: T) { let mut v; v = Vec::with_capacity(idx + extlen + 1); - v.push_all(name.index(&(0..idx))); + v.push_all(&name[0..idx]); v.push(dot); v.push_all(extension.container_as_bytes()); Some(v) @@ -877,7 +877,7 @@ fn container_as_bytes(&self) -> &[u8] { } #[inline] fn container_as_str(&self) -> Option<&str> { - Some(self.index(&FullRange)) + Some(&self[]) } #[inline] fn is_str(_: Option<&String>) -> bool { true } @@ -893,7 +893,7 @@ fn container_as_bytes(&self) -> &[u8] { impl BytesContainer for Vec { #[inline] fn container_as_bytes(&self) -> &[u8] { - self.index(&FullRange) + &self[] } } diff --git a/src/libstd/path/posix.rs b/src/libstd/path/posix.rs index 0b7dc19fcab..708fe724cb1 100644 --- a/src/libstd/path/posix.rs +++ b/src/libstd/path/posix.rs @@ -17,7 +17,6 @@ use io::Writer; use iter::{AdditiveIterator, Extend}; use iter::{Iterator, IteratorExt, Map}; -use ops::Index; use marker::Sized; use option::Option::{self, Some, None}; use slice::{AsSlice, Split, SliceExt, SliceConcatExt}; @@ -127,7 +126,7 @@ unsafe fn set_filename_unchecked(&mut self, filename: T) { None => { self.repr = Path::normalize(filename); } - Some(idx) if self.repr.index(&((idx+1)..)) == b".." => { + Some(idx) if &self.repr[(idx+1)..] == b".." => { let mut v = Vec::with_capacity(self.repr.len() + 1 + filename.len()); v.push_all(self.repr.as_slice()); v.push(SEP_BYTE); @@ -137,7 +136,7 @@ unsafe fn set_filename_unchecked(&mut self, filename: T) { } Some(idx) => { let mut v = Vec::with_capacity(idx + 1 + filename.len()); - v.push_all(self.repr.index(&(0..(idx+1)))); + v.push_all(&self.repr[0..(idx+1)]); v.push_all(filename); // FIXME: this is slow self.repr = Path::normalize(v.as_slice()); @@ -178,9 +177,9 @@ fn dirname<'a>(&'a self) -> &'a [u8] { match self.sepidx { None if b".." == self.repr => self.repr.as_slice(), None => dot_static, - Some(0) => self.repr.index(&(0..1)), - Some(idx) if self.repr.index(&((idx+1)..)) == b".." => self.repr.as_slice(), - Some(idx) => self.repr.index(&(0..idx)) + Some(0) => &self.repr[0..1], + Some(idx) if &self.repr[(idx+1)..] == b".." => self.repr.as_slice(), + Some(idx) => &self.repr[0..idx] } } @@ -189,9 +188,9 @@ fn filename<'a>(&'a self) -> Option<&'a [u8]> { None if b"." == self.repr || b".." == self.repr => None, None => Some(self.repr.as_slice()), - Some(idx) if self.repr.index(&((idx+1)..)) == b".." => None, - Some(0) if self.repr.index(&(1..)).is_empty() => None, - Some(idx) => Some(self.repr.index(&((idx+1)..))) + Some(idx) if &self.repr[(idx+1)..] == b".." => None, + Some(0) if self.repr[1..].is_empty() => None, + Some(idx) => Some(&self.repr[(idx+1)..]) } } @@ -333,7 +332,7 @@ fn normalize>(v: &V) -> Vec { // borrowck is being very picky let val = { let is_abs = !v.as_slice().is_empty() && v.as_slice()[0] == SEP_BYTE; - let v_ = if is_abs { v.as_slice().index(&(1..)) } else { v.as_slice() }; + let v_ = if is_abs { &v.as_slice()[1..] 
} else { v.as_slice() }; let comps = normalize_helper(v_, is_abs); match comps { None => None, @@ -372,7 +371,7 @@ fn normalize>(v: &V) -> Vec { /// A path of "/" yields no components. A path of "." yields one component. pub fn components<'a>(&'a self) -> Components<'a> { let v = if self.repr[0] == SEP_BYTE { - self.repr.index(&(1..)) + &self.repr[1..] } else { self.repr.as_slice() }; let is_sep_byte: fn(&u8) -> bool = is_sep_byte; // coerce to fn ptr let mut ret = v.split(is_sep_byte); diff --git a/src/libstd/path/windows.rs b/src/libstd/path/windows.rs index 5c4e7aa9ac2..dee511bc097 100644 --- a/src/libstd/path/windows.rs +++ b/src/libstd/path/windows.rs @@ -25,7 +25,7 @@ use iter::{Iterator, IteratorExt, Map, repeat}; use mem; use option::Option::{self, Some, None}; -use ops::{FullRange, Index}; +use ops::FullRange; use slice::{SliceExt, SliceConcatExt}; use str::{SplitTerminator, FromStr, StrExt}; use string::{String, ToString}; @@ -173,30 +173,30 @@ unsafe fn set_filename_unchecked(&mut self, filename: T) { s.push_str(".."); s.push(SEP); s.push_str(filename); - self.update_normalized(s.index(&FullRange)); + self.update_normalized(&s[]); } None => { self.update_normalized(filename); } - Some((_,idxa,end)) if self.repr.index(&(idxa..end)) == ".." => { + Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => { let mut s = String::with_capacity(end + 1 + filename.len()); - s.push_str(self.repr.index(&(0..end))); + s.push_str(&self.repr[0..end]); s.push(SEP); s.push_str(filename); - self.update_normalized(s.index(&FullRange)); + self.update_normalized(&s[]); } Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => { let mut s = String::with_capacity(idxb + filename.len()); - s.push_str(self.repr.index(&(0..idxb))); + s.push_str(&self.repr[0..idxb]); s.push_str(filename); - self.update_normalized(s.index(&FullRange)); + self.update_normalized(&s[]); } Some((idxb,_,_)) => { let mut s = String::with_capacity(idxb + 1 + filename.len()); - s.push_str(self.repr.index(&(0..idxb))); + s.push_str(&self.repr[0..idxb]); s.push(SEP); s.push_str(filename); - self.update_normalized(s.index(&FullRange)); + self.update_normalized(&s[]); } } } @@ -215,12 +215,12 @@ unsafe fn push_unchecked(&mut self, path: T) { let path = path.container_as_str().unwrap(); fn is_vol_abs(path: &str, prefix: Option) -> bool { // assume prefix is Some(DiskPrefix) - let rest = path.index(&(prefix_len(prefix)..)); + let rest = &path[prefix_len(prefix)..]; !rest.is_empty() && rest.as_bytes()[0].is_ascii() && is_sep(rest.as_bytes()[0] as char) } fn shares_volume(me: &Path, path: &str) -> bool { // path is assumed to have a prefix of Some(DiskPrefix) - let repr = me.repr.index(&FullRange); + let repr = &me.repr[]; match me.prefix { Some(DiskPrefix) => { repr.as_bytes()[0] == path.as_bytes()[0].to_ascii_uppercase() @@ -252,7 +252,7 @@ fn append_path(me: &mut Path, path: &str) { else { None }; let pathlen = path_.as_ref().map_or(path.len(), |p| p.len()); let mut s = String::with_capacity(me.repr.len() + 1 + pathlen); - s.push_str(me.repr.index(&FullRange)); + s.push_str(&me.repr[]); let plen = me.prefix_len(); // if me is "C:" we don't want to add a path separator match me.prefix { @@ -264,9 +264,9 @@ fn append_path(me: &mut Path, path: &str) { } match path_ { None => s.push_str(path), - Some(p) => s.push_str(p.index(&FullRange)), + Some(p) => s.push_str(&p[]), }; - me.update_normalized(s.index(&FullRange)) + me.update_normalized(&s[]) } if !path.is_empty() { @@ -274,7 +274,7 @@ fn append_path(me: &mut 
Path, path: &str) { match prefix { Some(DiskPrefix) if !is_vol_abs(path, prefix) && shares_volume(self, path) => { // cwd-relative path, self is on the same volume - append_path(self, path.index(&(prefix_len(prefix)..))); + append_path(self, &path[prefix_len(prefix)..]); } Some(_) => { // absolute path, or cwd-relative and self is not same volume @@ -320,7 +320,7 @@ fn new_opt(path: T) -> Option { /// Always returns a `Some` value. #[inline] fn as_str<'a>(&'a self) -> Option<&'a str> { - Some(self.repr.index(&FullRange)) + Some(&self.repr[]) } #[inline] @@ -342,21 +342,21 @@ fn dirname<'a>(&'a self) -> &'a [u8] { /// Always returns a `Some` value. fn dirname_str<'a>(&'a self) -> Option<&'a str> { Some(match self.sepidx_or_prefix_len() { - None if ".." == self.repr => self.repr.index(&FullRange), + None if ".." == self.repr => &self.repr[], None => ".", - Some((_,idxa,end)) if self.repr.index(&(idxa..end)) == ".." => { - self.repr.index(&FullRange) + Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => { + &self.repr[] } - Some((idxb,_,end)) if self.repr.index(&(idxb..end)) == "\\" => { - self.repr.index(&FullRange) + Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => { + &self.repr[] } - Some((0,idxa,_)) => self.repr.index(&(0..idxa)), + Some((0,idxa,_)) => &self.repr[0..idxa], Some((idxb,idxa,_)) => { match self.prefix { Some(DiskPrefix) | Some(VerbatimDiskPrefix) if idxb == self.prefix_len() => { - self.repr.index(&(0..idxa)) + &self.repr[0..idxa] } - _ => self.repr.index(&(0..idxb)) + _ => &self.repr[0..idxb] } } }) @@ -370,13 +370,13 @@ fn filename<'a>(&'a self) -> Option<&'a [u8]> { /// See `GenericPath::filename_str` for info. /// Always returns a `Some` value if `filename` returns a `Some` value. fn filename_str<'a>(&'a self) -> Option<&'a str> { - let repr = self.repr.index(&FullRange); + let repr = &self.repr[]; match self.sepidx_or_prefix_len() { None if "." == repr || ".." == repr => None, None => Some(repr), - Some((_,idxa,end)) if repr.index(&(idxa..end)) == ".." => None, + Some((_,idxa,end)) if &repr[idxa..end] == ".." 
=> None, Some((_,idxa,end)) if idxa == end => None, - Some((_,idxa,end)) => Some(repr.index(&(idxa..end))) + Some((_,idxa,end)) => Some(&repr[idxa..end]) } } @@ -408,7 +408,7 @@ fn pop(&mut self) -> bool { true } Some((idxb,idxa,end)) if idxb == idxa && idxb == end => false, - Some((idxb,_,end)) if self.repr.index(&(idxb..end)) == "\\" => false, + Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => false, Some((idxb,idxa,_)) => { let trunc = match self.prefix { Some(DiskPrefix) | Some(VerbatimDiskPrefix) | None => { @@ -428,15 +428,15 @@ fn root_path(&self) -> Option { if self.prefix.is_some() { Some(Path::new(match self.prefix { Some(DiskPrefix) if self.is_absolute() => { - self.repr.index(&(0..(self.prefix_len()+1))) + &self.repr[0..(self.prefix_len()+1)] } Some(VerbatimDiskPrefix) => { - self.repr.index(&(0..(self.prefix_len()+1))) + &self.repr[0..(self.prefix_len()+1)] } - _ => self.repr.index(&(0..self.prefix_len())) + _ => &self.repr[0..self.prefix_len()] })) } else if is_vol_relative(self) { - Some(Path::new(self.repr.index(&(0..1)))) + Some(Path::new(&self.repr[0..1])) } else { None } @@ -455,7 +455,7 @@ fn root_path(&self) -> Option { fn is_absolute(&self) -> bool { match self.prefix { Some(DiskPrefix) => { - let rest = self.repr.index(&(self.prefix_len()..)); + let rest = &self.repr[self.prefix_len()..]; rest.len() > 0 && rest.as_bytes()[0] == SEP_BYTE } Some(_) => true, @@ -630,15 +630,15 @@ pub fn new_opt(path: T) -> Option { /// Does not distinguish between absolute and cwd-relative paths, e.g. /// C:\foo and C:foo. pub fn str_components<'a>(&'a self) -> StrComponents<'a> { - let repr = self.repr.index(&FullRange); + let repr = &self.repr[]; let s = match self.prefix { Some(_) => { let plen = self.prefix_len(); if repr.len() > plen && repr.as_bytes()[plen] == SEP_BYTE { - repr.index(&((plen+1)..)) - } else { repr.index(&(plen..)) } + &repr[(plen+1)..] + } else { &repr[plen..] 
} } - None if repr.as_bytes()[0] == SEP_BYTE => repr.index(&(1..)), + None if repr.as_bytes()[0] == SEP_BYTE => &repr[1..], None => repr }; let some: fn(&'a str) -> Option<&'a str> = Some; // coerce to fn ptr @@ -658,8 +658,8 @@ fn convert<'a>(x: Option<&'a str>) -> &'a [u8] { } fn equiv_prefix(&self, other: &Path) -> bool { - let s_repr = self.repr.index(&FullRange); - let o_repr = other.repr.index(&FullRange); + let s_repr = &self.repr[]; + let o_repr = &other.repr[]; match (self.prefix, other.prefix) { (Some(DiskPrefix), Some(VerbatimDiskPrefix)) => { self.is_absolute() && @@ -676,14 +676,14 @@ fn equiv_prefix(&self, other: &Path) -> bool { o_repr.as_bytes()[4].to_ascii_lowercase() } (Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => { - s_repr.index(&(2..self.prefix_len())) == o_repr.index(&(8..other.prefix_len())) + &s_repr[2..self.prefix_len()] == &o_repr[8..other.prefix_len()] } (Some(VerbatimUNCPrefix(_,_)), Some(UNCPrefix(_,_))) => { - s_repr.index(&(8..self.prefix_len())) == o_repr.index(&(2..other.prefix_len())) + &s_repr[8..self.prefix_len()] == &o_repr[2..other.prefix_len()] } (None, None) => true, (a, b) if a == b => { - s_repr.index(&(0..self.prefix_len())) == o_repr.index(&(0..other.prefix_len())) + &s_repr[0..self.prefix_len()] == &o_repr[0..other.prefix_len()] } _ => false } @@ -737,7 +737,7 @@ fn normalize__(s: &str, prefix: Option) -> Option { match prefix.unwrap() { DiskPrefix => { let len = prefix_len(prefix) + is_abs as uint; - let mut s = String::from_str(s.index(&(0..len))); + let mut s = String::from_str(&s[0..len]); unsafe { let v = s.as_mut_vec(); v[0] = (*v)[0].to_ascii_uppercase(); @@ -752,7 +752,7 @@ fn normalize__(s: &str, prefix: Option) -> Option { } VerbatimDiskPrefix => { let len = prefix_len(prefix) + is_abs as uint; - let mut s = String::from_str(s.index(&(0..len))); + let mut s = String::from_str(&s[0..len]); unsafe { let v = s.as_mut_vec(); v[4] = (*v)[4].to_ascii_uppercase(); @@ -762,14 +762,14 @@ fn normalize__(s: &str, prefix: Option) -> Option { _ => { let plen = prefix_len(prefix); if s.len() > plen { - Some(String::from_str(s.index(&(0..plen)))) + Some(String::from_str(&s[0..plen])) } else { None } } } } else if is_abs && comps.is_empty() { Some(repeat(SEP).take(1).collect()) } else { - let prefix_ = s.index(&(0..prefix_len(prefix))); + let prefix_ = &s[0..prefix_len(prefix)]; let n = prefix_.len() + if is_abs { comps.len() } else { comps.len() - 1} + comps.iter().map(|v| v.len()).sum(); @@ -780,15 +780,15 @@ fn normalize__(s: &str, prefix: Option) -> Option { s.push(':'); } Some(VerbatimDiskPrefix) => { - s.push_str(prefix_.index(&(0..4))); + s.push_str(&prefix_[0..4]); s.push(prefix_.as_bytes()[4].to_ascii_uppercase() as char); - s.push_str(prefix_.index(&(5..))); + s.push_str(&prefix_[5..]); } Some(UNCPrefix(a,b)) => { s.push_str("\\\\"); - s.push_str(prefix_.index(&(2..(a+2)))); + s.push_str(&prefix_[2..(a+2)]); s.push(SEP); - s.push_str(prefix_.index(&((3+a)..(3+a+b)))); + s.push_str(&prefix_[(3+a)..(3+a+b)]); } Some(_) => s.push_str(prefix_), None => () @@ -813,8 +813,8 @@ fn normalize__(s: &str, prefix: Option) -> Option { fn update_sepidx(&mut self) { let s = if self.has_nonsemantic_trailing_slash() { - self.repr.index(&(0..(self.repr.len()-1))) - } else { self.repr.index(&FullRange) }; + &self.repr[0..(self.repr.len()-1)] + } else { &self.repr[] }; let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) { is_sep } else { @@ -893,17 +893,17 @@ pub fn is_verbatim(path: &Path) -> bool { /// non-verbatim, the non-verbatim 
version is returned. /// Otherwise, None is returned. pub fn make_non_verbatim(path: &Path) -> Option { - let repr = path.repr.index(&FullRange); + let repr = &path.repr[]; let new_path = match path.prefix { Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None, Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()), Some(VerbatimDiskPrefix) => { // \\?\D:\ - Path::new(repr.index(&(4..))) + Path::new(&repr[4..]) } Some(VerbatimUNCPrefix(_,_)) => { // \\?\UNC\server\share - Path::new(format!(r"\{}", repr.index(&(7..)))) + Path::new(format!(r"\{}", &repr[7..])) } }; if new_path.prefix.is_none() { @@ -912,8 +912,7 @@ pub fn make_non_verbatim(path: &Path) -> Option { return None; } // now ensure normalization didn't change anything - if repr.index(&(path.prefix_len()..)) == - new_path.repr.index(&(new_path.prefix_len()..)) { + if &repr[path.prefix_len()..] == &new_path.repr[new_path.prefix_len()..] { Some(new_path) } else { None @@ -978,13 +977,13 @@ pub enum PathPrefix { fn parse_prefix<'a>(mut path: &'a str) -> Option { if path.starts_with("\\\\") { // \\ - path = path.index(&(2..)); + path = &path[2..]; if path.starts_with("?\\") { // \\?\ - path = path.index(&(2..)); + path = &path[2..]; if path.starts_with("UNC\\") { // \\?\UNC\server\share - path = path.index(&(4..)); + path = &path[4..]; let (idx_a, idx_b) = match parse_two_comps(path, is_sep_verbatim) { Some(x) => x, None => (path.len(), 0) @@ -1005,7 +1004,7 @@ fn parse_prefix<'a>(mut path: &'a str) -> Option { } } else if path.starts_with(".\\") { // \\.\path - path = path.index(&(2..)); + path = &path[2..]; let idx = path.find('\\').unwrap_or(path.len()); return Some(DeviceNSPrefix(idx)); } @@ -1030,7 +1029,7 @@ fn parse_two_comps(mut path: &str, f: fn(char) -> bool) -> Option<(uint, uint)> None => return None, Some(x) => x }; - path = path.index(&((idx_a+1)..)); + path = &path[(idx_a+1)..]; let idx_b = path.find(f).unwrap_or(path.len()); Some((idx_a, idx_b)) } @@ -1044,8 +1043,8 @@ fn normalize_helper<'a>(s: &'a str, prefix: Option) -> (bool, Option is_sep_verbatim }; let is_abs = s.len() > prefix_len(prefix) && f(s.char_at(prefix_len(prefix))); - let s_ = s.index(&(prefix_len(prefix)..)); - let s_ = if is_abs { s_.index(&(1..)) } else { s_ }; + let s_ = &s[prefix_len(prefix)..]; + let s_ = if is_abs { &s_[1..] } else { s_ }; if is_abs && s_.is_empty() { return (is_abs, match prefix { diff --git a/src/libstd/prelude/v1.rs b/src/libstd/prelude/v1.rs index dcb342b9ca2..d9c942c0185 100644 --- a/src/libstd/prelude/v1.rs +++ b/src/libstd/prelude/v1.rs @@ -17,7 +17,7 @@ #[stable] #[doc(no_inline)] pub use ops::{Drop, Fn, FnMut, FnOnce}; // TEMPORARY -#[unstable] #[doc(no_inline)] pub use ops::{Index, IndexMut, FullRange}; +#[unstable] #[doc(no_inline)] pub use ops::FullRange; // Reexported functions #[stable] #[doc(no_inline)] pub use mem::drop; diff --git a/src/libstd/rt/unwind.rs b/src/libstd/rt/unwind.rs index fd84f220942..03876189da9 100644 --- a/src/libstd/rt/unwind.rs +++ b/src/libstd/rt/unwind.rs @@ -544,7 +544,7 @@ fn begin_unwind_inner(msg: Box, file_line: &(&'static str, uint)) -> // MAX_CALLBACKS, so we're sure to clamp it as necessary. 
let callbacks = { let amt = CALLBACK_CNT.load(Ordering::SeqCst); - CALLBACKS.index(&(0..cmp::min(amt, MAX_CALLBACKS))) + &CALLBACKS[0..cmp::min(amt, MAX_CALLBACKS)] }; for cb in callbacks.iter() { match cb.load(Ordering::SeqCst) { diff --git a/src/libstd/rt/util.rs b/src/libstd/rt/util.rs index 59f654a95ca..c076f0a7c6c 100644 --- a/src/libstd/rt/util.rs +++ b/src/libstd/rt/util.rs @@ -131,7 +131,7 @@ struct BufWriter<'a> { impl<'a> fmt::Writer for BufWriter<'a> { fn write_str(&mut self, bytes: &str) -> fmt::Result { let left = self.buf.slice_from_mut(self.pos); - let to_write = bytes.as_bytes().index(&(0..cmp::min(bytes.len(), left.len()))); + let to_write = &bytes.as_bytes()[0..cmp::min(bytes.len(), left.len())]; slice::bytes::copy_memory(left, to_write); self.pos += to_write.len(); Ok(()) @@ -142,7 +142,7 @@ fn write_str(&mut self, bytes: &str) -> fmt::Result { let mut msg = [0u8; 512]; let mut w = BufWriter { buf: &mut msg, pos: 0 }; let _ = write!(&mut w, "{}", args); - let msg = str::from_utf8(w.buf.index_mut(&(0..w.pos))).unwrap_or("aborted"); + let msg = str::from_utf8(&w.buf[0..w.pos]).unwrap_or("aborted"); let msg = if msg.is_empty() {"aborted"} else {msg}; // Give some context to the message diff --git a/src/libstd/sys/common/net.rs b/src/libstd/sys/common/net.rs index 902942d7244..4cf891ac498 100644 --- a/src/libstd/sys/common/net.rs +++ b/src/libstd/sys/common/net.rs @@ -469,7 +469,7 @@ pub fn write(fd: sock_t, // Also as with read(), we use MSG_DONTWAIT to guard ourselves // against unforeseen circumstances. let _guard = lock(); - let ptr = buf.index(&(written..)).as_ptr(); + let ptr = buf[written..].as_ptr(); let len = buf.len() - written; match retry(|| write(deadline.is_some(), ptr, len)) { -1 if wouldblock() => {} diff --git a/src/libstd/sys/windows/backtrace.rs b/src/libstd/sys/windows/backtrace.rs index eb76f13afe7..ee2dd14955b 100644 --- a/src/libstd/sys/windows/backtrace.rs +++ b/src/libstd/sys/windows/backtrace.rs @@ -362,7 +362,7 @@ macro_rules! sym{ ($e:expr, $t:ident) => (unsafe { let bytes = unsafe { ffi::c_str_to_bytes(&ptr) }; match str::from_utf8(bytes) { Ok(s) => try!(demangle(w, s)), - Err(..) => try!(w.write(bytes.index(&(..(bytes.len()-1))))), + Err(..) 
=> try!(w.write(&bytes[..(bytes.len()-1)])), } } try!(w.write(&['\n' as u8])); diff --git a/src/libstd/sys/windows/os.rs b/src/libstd/sys/windows/os.rs index fcde5c01080..064633f321c 100644 --- a/src/libstd/sys/windows/os.rs +++ b/src/libstd/sys/windows/os.rs @@ -36,7 +36,7 @@ pub fn truncate_utf16_at_nul<'a>(v: &'a [u16]) -> &'a [u16] { match v.iter().position(|c| *c == 0) { // don't include the 0 - Some(i) => v.index(&(0..i)), + Some(i) => &v[0..i], None => v } } diff --git a/src/libstd/sys/windows/pipe.rs b/src/libstd/sys/windows/pipe.rs index 016757ef63e..9996909f2f5 100644 --- a/src/libstd/sys/windows/pipe.rs +++ b/src/libstd/sys/windows/pipe.rs @@ -453,7 +453,7 @@ pub fn write(&mut self, buf: &[u8]) -> IoResult<()> { } let ret = unsafe { libc::WriteFile(self.handle(), - buf.index(&(offset..)).as_ptr() as libc::LPVOID, + buf[offset..].as_ptr() as libc::LPVOID, (buf.len() - offset) as libc::DWORD, &mut bytes_written, &mut overlapped) diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 6766127a5f1..5d0ef633b0e 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -203,7 +203,7 @@ fn encode(&self, s: &mut S) -> Result<(), S::Error> { impl Decodable for Ident { fn decode(d: &mut D) -> Result { - Ok(str_to_ident(try!(d.read_str()).index(&FullRange))) + Ok(str_to_ident(&try!(d.read_str())[])) } } diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs index 7496a0f9f26..adcb9ff9cc2 100644 --- a/src/libsyntax/ast_map/mod.rs +++ b/src/libsyntax/ast_map/mod.rs @@ -106,7 +106,7 @@ pub fn path_to_string>(path: PI) -> String { if !s.is_empty() { s.push_str("::"); } - s.push_str(e.index(&FullRange)); + s.push_str(&e[]); s }).to_string() } @@ -483,20 +483,20 @@ pub fn with_attrs(&self, id: NodeId, f: F) -> T where F: FnOnce(Option<&[Attribute]>) -> T, { let attrs = match self.get(id) { - NodeItem(i) => Some(i.attrs.index(&FullRange)), - NodeForeignItem(fi) => Some(fi.attrs.index(&FullRange)), + NodeItem(i) => Some(&i.attrs[]), + NodeForeignItem(fi) => Some(&fi.attrs[]), NodeTraitItem(ref tm) => match **tm { - RequiredMethod(ref type_m) => Some(type_m.attrs.index(&FullRange)), - ProvidedMethod(ref m) => Some(m.attrs.index(&FullRange)), - TypeTraitItem(ref typ) => Some(typ.attrs.index(&FullRange)), + RequiredMethod(ref type_m) => Some(&type_m.attrs[]), + ProvidedMethod(ref m) => Some(&m.attrs[]), + TypeTraitItem(ref typ) => Some(&typ.attrs[]), }, NodeImplItem(ref ii) => { match **ii { - MethodImplItem(ref m) => Some(m.attrs.index(&FullRange)), - TypeImplItem(ref t) => Some(t.attrs.index(&FullRange)), + MethodImplItem(ref m) => Some(&m.attrs[]), + TypeImplItem(ref t) => Some(&t.attrs[]), } } - NodeVariant(ref v) => Some(v.node.attrs.index(&FullRange)), + NodeVariant(ref v) => Some(&v.node.attrs[]), // unit/tuple structs take the attributes straight from // the struct definition. // FIXME(eddyb) make this work again (requires access to the map). 
@@ -520,7 +520,7 @@ pub fn nodes_matching_suffix<'a>(&'a self, parts: &'a [String]) NodesMatchingSuffix { map: self, item_name: parts.last().unwrap(), - in_which: parts.index(&(0..(parts.len() - 1))), + in_which: &parts[0..(parts.len() - 1)], idx: 0, } } @@ -597,7 +597,7 @@ fn suffix_matches(&self, parent: NodeId) -> bool { None => return false, Some((node_id, name)) => (node_id, name), }; - if part.index(&FullRange) != mod_name.as_str() { + if &part[] != mod_name.as_str() { return false; } cursor = self.map.get_parent(mod_id); @@ -635,7 +635,7 @@ fn item_is_mod(item: &Item) -> bool { // We are looking at some node `n` with a given name and parent // id; do their names match what I am seeking? fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool { - name.as_str() == self.item_name.index(&FullRange) && + name.as_str() == &self.item_name[] && self.suffix_matches(parent_of_n) } } @@ -1047,7 +1047,7 @@ fn print_node(&mut self, node: &Node) -> IoResult<()> { fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String { let id_str = format!(" (id={})", id); - let id_str = if include_id { id_str.index(&FullRange) } else { "" }; + let id_str = if include_id { &id_str[] } else { "" }; match map.find(id) { Some(NodeItem(item)) => { diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 871f1237aee..a54cdef0541 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -238,11 +238,11 @@ pub fn impl_pretty_name(trait_ref: &Option, ty: &Ty) -> Ident { match *trait_ref { Some(ref trait_ref) => { pretty.push('.'); - pretty.push_str(pprust::path_to_string(&trait_ref.path).index(&FullRange)); + pretty.push_str(&pprust::path_to_string(&trait_ref.path)[]); } None => {} } - token::gensym_ident(pretty.index(&FullRange)) + token::gensym_ident(&pretty[]) } pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod { @@ -704,7 +704,7 @@ pub fn pat_is_ident(pat: P) -> bool { pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool { (a.span == b.span) && (a.global == b.global) - && (segments_name_eq(a.segments.index(&FullRange), b.segments.index(&FullRange))) + && (segments_name_eq(&a.segments[], &b.segments[])) } // are two arrays of segments equal when compared unhygienically? 
@@ -791,14 +791,14 @@ fn ident_to_segment(id : &Ident) -> PathSegment { #[test] fn idents_name_eq_test() { assert!(segments_name_eq( - [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] - .iter().map(ident_to_segment).collect::>().index(&FullRange), - [Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}] - .iter().map(ident_to_segment).collect::>().index(&FullRange))); + &[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] + .iter().map(ident_to_segment).collect::>()[], + &[Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}] + .iter().map(ident_to_segment).collect::>()[])); assert!(!segments_name_eq( - [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] - .iter().map(ident_to_segment).collect::>().index(&FullRange), - [Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}] - .iter().map(ident_to_segment).collect::>().index(&FullRange))); + &[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] + .iter().map(ident_to_segment).collect::>()[], + &[Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}] + .iter().map(ident_to_segment).collect::>()[])); } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 416fc8c2278..2cea55dfc55 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -98,7 +98,7 @@ fn value_str(&self) -> Option { fn meta_item_list<'a>(&'a self) -> Option<&'a [P]> { match self.node { - MetaList(_, ref l) => Some(l.index(&FullRange)), + MetaList(_, ref l) => Some(&l[]), _ => None } } @@ -136,8 +136,8 @@ fn with_desugared_doc(&self, f: F) -> T where let comment = self.value_str().unwrap(); let meta = mk_name_value_item_str( InternedString::new("doc"), - token::intern_and_get_ident(strip_doc_comment_decoration( - comment.get()).index(&FullRange))); + token::intern_and_get_ident(&strip_doc_comment_decoration( + comment.get())[])); if self.node.style == ast::AttrOuter { f(&mk_attr_outer(self.node.id, meta)) } else { @@ -297,9 +297,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr { } MetaList(ref n, ref items) if *n == "inline" => { mark_used(attr); - if contains_name(items.index(&FullRange), "always") { + if contains_name(&items[], "always") { InlineAlways - } else if contains_name(items.index(&FullRange), "never") { + } else if contains_name(&items[], "never") { InlineNever } else { InlineHint @@ -403,7 +403,7 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P]) { if !set.insert(name.clone()) { diagnostic.span_fatal(meta.span, - format!("duplicate meta item `{}`", name).index(&FullRange)); + &format!("duplicate meta item `{}`", name)[]); } } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 31fe23847d9..40f239b5a5d 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -304,9 +304,9 @@ pub fn get_line(&self, line_number: uint) -> Option { lines.get(line_number).map(|&line| { let begin: BytePos = line - self.start_pos; let begin = begin.to_uint(); - let slice = self.src.index(&(begin..)); + let slice = &self.src[begin..]; match slice.find('\n') { - Some(e) => slice.index(&(0..e)), + Some(e) => &slice[0..e], None => slice }.to_string() }) @@ -351,9 +351,9 @@ pub fn new_filemap(&self, filename: FileName, src: String) -> Rc { // FIXME #12884: no efficient/safe way to remove from the start of a string // and reuse the allocation. 
let mut src = if src.starts_with("\u{feff}") { - String::from_str(src.index(&(3..))) + String::from_str(&src[3..]) } else { - String::from_str(src.index(&FullRange)) + String::from_str(&src[]) }; // Append '\n' in case it's not already there. @@ -440,8 +440,7 @@ pub fn span_to_snippet(&self, sp: Span) -> Option { if begin.fm.start_pos != end.fm.start_pos { None } else { - Some(begin.fm.src.index(&(begin.pos.to_uint().. - end.pos.to_uint())).to_string()) + Some((&begin.fm.src[begin.pos.to_uint()..end.pos.to_uint()]).to_string()) } } diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index fde2fdb3c55..7e57709f33d 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -123,7 +123,7 @@ pub fn span_bug(&self, sp: Span, msg: &str) -> ! { panic!(ExplicitBug); } pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! { - self.span_bug(sp, format!("unimplemented {}", msg).index(&FullRange)); + self.span_bug(sp, &format!("unimplemented {}", msg)[]); } pub fn handler<'a>(&'a self) -> &'a Handler { &self.handler @@ -166,7 +166,7 @@ pub fn abort_if_errors(&self) { self.err_count.get()); } } - self.fatal(s.index(&FullRange)); + self.fatal(&s[]); } pub fn warn(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Warning); @@ -182,7 +182,7 @@ pub fn bug(&self, msg: &str) -> ! { panic!(ExplicitBug); } pub fn unimpl(&self, msg: &str) -> ! { - self.bug(format!("unimplemented {}", msg).index(&FullRange)); + self.bug(&format!("unimplemented {}", msg)[]); } pub fn emit(&self, cmsp: Option<(&codemap::CodeMap, Span)>, @@ -277,7 +277,7 @@ fn print_maybe_styled(w: &mut EmitterWriter, // to be miscolored. We assume this is rare enough that we don't // have to worry about it. if msg.ends_with("\n") { - try!(t.write_str(msg.index(&(0..(msg.len()-1))))); + try!(t.write_str(&msg[0..(msg.len()-1)])); try!(t.reset()); try!(t.write_str("\n")); } else { @@ -299,16 +299,16 @@ fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level, } try!(print_maybe_styled(dst, - format!("{}: ", lvl.to_string()).index(&FullRange), + &format!("{}: ", lvl.to_string())[], term::attr::ForegroundColor(lvl.color()))); try!(print_maybe_styled(dst, - format!("{}", msg).index(&FullRange), + &format!("{}", msg)[], term::attr::Bold)); match code { Some(code) => { let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA); - try!(print_maybe_styled(dst, format!(" [{}]", code.clone()).index(&FullRange), style)); + try!(print_maybe_styled(dst, &format!(" [{}]", code.clone())[], style)); } None => () } @@ -398,12 +398,12 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, // the span) let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id}; let ses = cm.span_to_string(span_end); - try!(print_diagnostic(dst, ses.index(&FullRange), lvl, msg, code)); + try!(print_diagnostic(dst, &ses[], lvl, msg, code)); if rsp.is_full_span() { try!(custom_highlight_lines(dst, cm, sp, lvl, lines)); } } else { - try!(print_diagnostic(dst, ss.index(&FullRange), lvl, msg, code)); + try!(print_diagnostic(dst, &ss[], lvl, msg, code)); if rsp.is_full_span() { try!(highlight_lines(dst, cm, sp, lvl, lines)); } @@ -413,9 +413,9 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, Some(code) => match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) { Some(_) => { - try!(print_diagnostic(dst, ss.index(&FullRange), Help, - format!("pass `--explain {}` to see a detailed \ - explanation", code).index(&FullRange), None)); + 
try!(print_diagnostic(dst, &ss[], Help, + &format!("pass `--explain {}` to see a detailed \ + explanation", code)[], None)); } None => () }, @@ -432,9 +432,9 @@ fn highlight_lines(err: &mut EmitterWriter, let fm = &*lines.file; let mut elided = false; - let mut display_lines = lines.lines.index(&FullRange); + let mut display_lines = &lines.lines[]; if display_lines.len() > MAX_LINES { - display_lines = display_lines.index(&(0u..MAX_LINES)); + display_lines = &display_lines[0u..MAX_LINES]; elided = true; } // Print the offending lines @@ -494,7 +494,7 @@ fn highlight_lines(err: &mut EmitterWriter, } } try!(print_maybe_styled(err, - format!("{}\n", s).index(&FullRange), + &format!("{}\n", s)[], term::attr::ForegroundColor(lvl.color()))); } Ok(()) @@ -514,7 +514,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, -> io::IoResult<()> { let fm = &*lines.file; - let lines = lines.lines.index(&FullRange); + let lines = &lines.lines[]; if lines.len() > MAX_LINES { if let Some(line) = fm.get_line(lines[0]) { try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, @@ -545,7 +545,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, s.push('^'); s.push('\n'); print_maybe_styled(w, - s.index(&FullRange), + &s[], term::attr::ForegroundColor(lvl.color())) } @@ -560,12 +560,12 @@ fn print_macro_backtrace(w: &mut EmitterWriter, codemap::MacroAttribute => ("#[", "]"), codemap::MacroBang => ("", "!") }; - try!(print_diagnostic(w, ss.index(&FullRange), Note, - format!("in expansion of {}{}{}", pre, + try!(print_diagnostic(w, &ss[], Note, + &format!("in expansion of {}{}{}", pre, ei.callee.name, - post).index(&FullRange), None)); + post)[], None)); let ss = cm.span_to_string(ei.call_site); - try!(print_diagnostic(w, ss.index(&FullRange), Note, "expansion site", None)); + try!(print_diagnostic(w, &ss[], Note, "expansion site", None)); Ok(Some(ei.call_site)) } None => Ok(None) @@ -578,6 +578,6 @@ pub fn expect(diag: &SpanHandler, opt: Option, msg: M) -> T where { match opt { Some(t) => t, - None => diag.handler().bug(msg().index(&FullRange)), + None => diag.handler().bug(&msg()[]), } } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 0f4ebd74b66..1469c50061c 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -56,9 +56,9 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, with_used_diagnostics(|diagnostics| { match diagnostics.insert(code.name, span) { Some(previous_span) => { - ecx.span_warn(span, format!( + ecx.span_warn(span, &format!( "diagnostic code {} already used", token::get_ident(code).get() - ).index(&FullRange)); + )[]); ecx.span_note(previous_span, "previous invocation"); }, None => () @@ -85,14 +85,14 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, }; with_registered_diagnostics(|diagnostics| { if diagnostics.insert(code.name, description).is_some() { - ecx.span_err(span, format!( + ecx.span_err(span, &format!( "diagnostic code {} already registered", token::get_ident(*code).get() - ).index(&FullRange)); + )[]); } }); - let sym = Ident::new(token::gensym(( + let sym = Ident::new(token::gensym(&( "__register_diagnostic_".to_string() + token::get_ident(*code).get() - ).index(&FullRange))); + )[])); MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter()) } diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index 04dec0e8028..fd3bac5b2fc 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -99,8 +99,8 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, 
sp: Span, tts: &[ast::TokenTree]) let output = match constraint.get().slice_shift_char() { Some(('=', _)) => None, Some(('+', operand)) => { - Some(token::intern_and_get_ident(format!( - "={}", operand).index(&FullRange))) + Some(token::intern_and_get_ident(&format!( + "={}", operand)[])) } _ => { cx.span_err(span, "output operand constraint lacks '=' or '+'"); diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 52e402689ba..11edf214435 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -539,7 +539,7 @@ pub fn original_span_in_file(&self) -> Span { pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); } pub fn mod_path(&self) -> Vec { let mut v = Vec::new(); - v.push(token::str_to_ident(self.ecfg.crate_name.index(&FullRange))); + v.push(token::str_to_ident(&self.ecfg.crate_name[])); v.extend(self.mod_path.iter().map(|a| *a)); return v; } @@ -547,8 +547,8 @@ pub fn bt_push(&mut self, ei: ExpnInfo) { self.recursion_count += 1; if self.recursion_count > self.ecfg.recursion_limit { self.span_fatal(ei.call_site, - format!("recursion limit reached while expanding the macro `{}`", - ei.callee.name).index(&FullRange)); + &format!("recursion limit reached while expanding the macro `{}`", + ei.callee.name)[]); } let mut call_site = ei.call_site; @@ -670,7 +670,7 @@ pub fn check_zero_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], name: &str) { if tts.len() != 0 { - cx.span_err(sp, format!("{} takes no arguments", name).index(&FullRange)); + cx.span_err(sp, &format!("{} takes no arguments", name)[]); } } @@ -683,12 +683,12 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt, -> Option { let mut p = cx.new_parser_from_tts(tts); if p.token == token::Eof { - cx.span_err(sp, format!("{} takes 1 argument", name).index(&FullRange)); + cx.span_err(sp, &format!("{} takes 1 argument", name)[]); return None } let ret = cx.expander().fold_expr(p.parse_expr()); if p.token != token::Eof { - cx.span_err(sp, format!("{} takes 1 argument", name).index(&FullRange)); + cx.span_err(sp, &format!("{} takes 1 argument", name)[]); } expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| { s.get().to_string() diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index bd4f295401c..27523ea4535 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -708,8 +708,7 @@ fn expr_tuple(&self, sp: Span, exprs: Vec>) -> P { fn expr_fail(&self, span: Span, msg: InternedString) -> P { let loc = self.codemap().lookup_char_pos(span.lo); let expr_file = self.expr_str(span, - token::intern_and_get_ident(loc.file - .name.index(&FullRange))); + token::intern_and_get_ident(&loc.file.name[])); let expr_line = self.expr_uint(span, loc.line); let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line)); let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple); diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index 1f1781dceb3..39895a3946a 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -40,14 +40,14 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, ast::LitInt(i, ast::UnsignedIntLit(_)) | ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) | ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => { - accumulator.push_str(format!("{}", i).index(&FullRange)); + accumulator.push_str(&format!("{}", i)[]); } ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) | ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => { - accumulator.push_str(format!("-{}", i).index(&FullRange)); + 
accumulator.push_str(&format!("-{}", i)[]); } ast::LitBool(b) => { - accumulator.push_str(format!("{}", b).index(&FullRange)); + accumulator.push_str(&format!("{}", b)[]); } ast::LitByte(..) | ast::LitBinary(..) => { @@ -62,5 +62,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, } base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(accumulator.index(&FullRange)))) + token::intern_and_get_ident(&accumulator[]))) } diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 02f702248cb..1af3ba1d326 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } } - let res = str_to_ident(res_str.index(&FullRange)); + let res = str_to_ident(&res_str[]); let e = P(ast::Expr { id: ast::DUMMY_NODE_ID, diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs index d9d6cebd05c..784a92b9a0e 100644 --- a/src/libsyntax/ext/deriving/clone.rs +++ b/src/libsyntax/ext/deriving/clone.rs @@ -79,12 +79,12 @@ fn cs_clone( }, EnumNonMatchingCollapsed (..) => { cx.span_bug(trait_span, - format!("non-matching enum variants in \ - `deriving({})`", name).index(&FullRange)) + &format!("non-matching enum variants in \ + `deriving({})`", name)[]) } StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, - format!("static method in `deriving({})`", name).index(&FullRange)) + &format!("static method in `deriving({})`", name)[]) } } @@ -100,8 +100,8 @@ fn cs_clone( Some(i) => i, None => { cx.span_bug(trait_span, - format!("unnamed field in normal struct in \ - `deriving({})`", name).index(&FullRange)) + &format!("unnamed field in normal struct in \ + `deriving({})`", name)[]) } }; cx.field_imm(field.span, ident, subcall(field)) diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs index a9289f0175a..7c65d2b4ff4 100644 --- a/src/libsyntax/ext/deriving/decodable.rs +++ b/src/libsyntax/ext/deriving/decodable.rs @@ -197,8 +197,7 @@ fn decode_static_fields(cx: &mut ExtCtxt, } else { let fields = fields.iter().enumerate().map(|(i, &span)| { getarg(cx, span, - token::intern_and_get_ident(format!("_field{}", - i).index(&FullRange)), + token::intern_and_get_ident(&format!("_field{}", i)[]), i) }).collect(); diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs index 7114217d51d..616390467f0 100644 --- a/src/libsyntax/ext/deriving/encodable.rs +++ b/src/libsyntax/ext/deriving/encodable.rs @@ -183,7 +183,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let name = match name { Some(id) => token::get_ident(id), None => { - token::intern_and_get_ident(format!("_field{}", i).index(&FullRange)) + token::intern_and_get_ident(&format!("_field{}", i)[]) } }; let enc = cx.expr_method_call(span, self_.clone(), diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index 50b3559f369..47b29a4db3e 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -510,15 +510,15 @@ fn expand_struct_def(&self, self, struct_def, type_ident, - self_args.index(&FullRange), - nonself_args.index(&FullRange)) + &self_args[], + &nonself_args[]) } else { method_def.expand_struct_method_body(cx, self, struct_def, type_ident, - self_args.index(&FullRange), - nonself_args.index(&FullRange)) + &self_args[], + &nonself_args[]) }; method_def.create_method(cx, @@ 
-550,15 +550,15 @@ fn expand_enum_def(&self, self, enum_def, type_ident, - self_args.index(&FullRange), - nonself_args.index(&FullRange)) + &self_args[], + &nonself_args[]) } else { method_def.expand_enum_method_body(cx, self, enum_def, type_ident, self_args, - nonself_args.index(&FullRange)) + &nonself_args[]) }; method_def.create_method(cx, @@ -645,7 +645,7 @@ fn split_self_nonself_args(&self, for (i, ty) in self.args.iter().enumerate() { let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics); - let ident = cx.ident_of(format!("__arg_{}", i).index(&FullRange)); + let ident = cx.ident_of(&format!("__arg_{}", i)[]); arg_tys.push((ident, ast_ty)); let arg_expr = cx.expr_ident(trait_.span, ident); @@ -751,8 +751,8 @@ fn expand_struct_method_body(&self, trait_.create_struct_pattern(cx, struct_path, struct_def, - format!("__self_{}", - i).index(&FullRange), + &format!("__self_{}", + i)[], ast::MutImmutable); patterns.push(pat); raw_fields.push(ident_expr); @@ -908,22 +908,22 @@ fn build_enum_match_tuple( .collect::>(); let self_arg_idents = self_arg_names.iter() - .map(|name|cx.ident_of(name.index(&FullRange))) + .map(|name|cx.ident_of(&name[])) .collect::>(); // The `vi_idents` will be bound, solely in the catch-all, to // a series of let statements mapping each self_arg to a uint // corresponding to its variant index. let vi_idents: Vec = self_arg_names.iter() - .map(|name| { let vi_suffix = format!("{}_vi", name.index(&FullRange)); - cx.ident_of(vi_suffix.index(&FullRange)) }) + .map(|name| { let vi_suffix = format!("{}_vi", &name[]); + cx.ident_of(&vi_suffix[]) }) .collect::>(); // Builds, via callback to call_substructure_method, the // delegated expression that handles the catch-all case, // using `__variants_tuple` to drive logic if necessary. let catch_all_substructure = EnumNonMatchingCollapsed( - self_arg_idents, variants.index(&FullRange), vi_idents.index(&FullRange)); + self_arg_idents, &variants[], &vi_idents[]); // These arms are of the form: // (Variant1, Variant1, ...) 
=> Body1 @@ -945,12 +945,12 @@ fn build_enum_match_tuple( let mut subpats = Vec::with_capacity(self_arg_names.len()); let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1); let first_self_pat_idents = { - let (p, idents) = mk_self_pat(cx, self_arg_names[0].index(&FullRange)); + let (p, idents) = mk_self_pat(cx, &self_arg_names[0][]); subpats.push(p); idents }; for self_arg_name in self_arg_names.tail().iter() { - let (p, idents) = mk_self_pat(cx, self_arg_name.index(&FullRange)); + let (p, idents) = mk_self_pat(cx, &self_arg_name[]); subpats.push(p); self_pats_idents.push(idents); } @@ -1006,7 +1006,7 @@ fn build_enum_match_tuple( &**variant, field_tuples); let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, self_args.index(&FullRange), nonself_args, + cx, trait_, type_ident, &self_args[], nonself_args, &substructure); cx.arm(sp, vec![single_pat], arm_expr) @@ -1059,7 +1059,7 @@ fn build_enum_match_tuple( } let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, self_args.index(&FullRange), nonself_args, + cx, trait_, type_ident, &self_args[], nonself_args, &catch_all_substructure); // Builds the expression: @@ -1263,7 +1263,7 @@ fn create_struct_pattern(&self, cx.span_bug(sp, "a struct with named and unnamed fields in `derive`"); } }; - let ident = cx.ident_of(format!("{}_{}", prefix, i).index(&FullRange)); + let ident = cx.ident_of(&format!("{}_{}", prefix, i)[]); paths.push(codemap::Spanned{span: sp, node: ident}); let val = cx.expr( sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident))))); @@ -1309,7 +1309,7 @@ fn create_enum_variant_pattern(&self, let mut ident_expr = Vec::new(); for (i, va) in variant_args.iter().enumerate() { let sp = self.set_expn_info(cx, va.ty.span); - let ident = cx.ident_of(format!("{}_{}", prefix, i).index(&FullRange)); + let ident = cx.ident_of(&format!("{}_{}", prefix, i)[]); let path1 = codemap::Spanned{span: sp, node: ident}; paths.push(path1); let expr_path = cx.expr_path(cx.path_ident(sp, ident)); @@ -1352,7 +1352,7 @@ pub fn cs_fold(use_foldl: bool, field.span, old, field.self_.clone(), - field.other.index(&FullRange)) + &field.other[]) }) } else { all_fields.iter().rev().fold(base, |old, field| { @@ -1360,12 +1360,12 @@ pub fn cs_fold(use_foldl: bool, field.span, old, field.self_.clone(), - field.other.index(&FullRange)) + &field.other[]) }) } }, EnumNonMatchingCollapsed(ref all_args, _, tuple) => - enum_nonmatch_f(cx, trait_span, (all_args.index(&FullRange), tuple), + enum_nonmatch_f(cx, trait_span, (&all_args[], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `derive`") @@ -1405,7 +1405,7 @@ pub fn cs_same_method(f: F, f(cx, trait_span, called) }, EnumNonMatchingCollapsed(ref all_self_args, _, tuple) => - enum_nonmatch_f(cx, trait_span, (all_self_args.index(&FullRange), tuple), + enum_nonmatch_f(cx, trait_span, (&all_self_args[], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `derive`") diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index 43a0e0606f8..6040d4ee547 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -121,9 +121,9 @@ macro_rules! 
expand { ref tname => { cx.span_err(titem.span, - format!("unknown `derive` \ + &format!("unknown `derive` \ trait: `{}`", - *tname).index(&FullRange)); + *tname)[]); } }; } diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index fa9a7899a12..48034ce50ab 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -127,7 +127,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let formatter = substr.nonself_args[0].clone(); let meth = cx.ident_of("write_fmt"); - let s = token::intern_and_get_ident(format_string.index(&FullRange)); + let s = token::intern_and_get_ident(&format_string[]); let format_string = cx.expr_str(span, s); // phew, not our responsibility any more! diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index eb3544e3c5c..9b54e259761 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -30,7 +30,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT Some(v) => v }; - let e = match os::getenv(var.index(&FullRange)) { + let e = match os::getenv(&var[]) { None => { cx.expr_path(cx.path_all(sp, true, @@ -56,7 +56,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT cx.ident_of("Some")), vec!(cx.expr_str(sp, token::intern_and_get_ident( - s.index(&FullRange))))) + &s[])))) } }; MacExpr::new(e) @@ -81,9 +81,9 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) }; let msg = match exprs.next() { None => { - token::intern_and_get_ident(format!("environment variable `{}` \ + token::intern_and_get_ident(&format!("environment variable `{}` \ not defined", - var).index(&FullRange)) + var)[]) } Some(second) => { match expr_to_string(cx, second, "expected string literal") { @@ -106,7 +106,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, msg.get()); cx.expr_uint(sp, 0) } - Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s.index(&FullRange))) + Some(s) => cx.expr_str(sp, token::intern_and_get_ident(&s[])) }; MacExpr::new(e) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 3e1bccf394a..1393e54d597 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -286,8 +286,8 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, None => { fld.cx.span_err( pth.span, - format!("macro undefined: '{}!'", - extnamestr.get()).index(&FullRange)); + &format!("macro undefined: '{}!'", + extnamestr.get())[]); // let compilation continue None @@ -303,7 +303,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, }, }); let fm = fresh_mark(); - let marked_before = mark_tts(tts.index(&FullRange), fm); + let marked_before = mark_tts(&tts[], fm); // The span that we pass to the expanders we want to // be the root of the call stack. 
That's the most @@ -314,7 +314,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, let opt_parsed = { let expanded = expandfun.expand(fld.cx, mac_span, - marked_before.index(&FullRange)); + &marked_before[]); parse_thunk(expanded) }; let parsed = match opt_parsed { @@ -322,9 +322,9 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, None => { fld.cx.span_err( pth.span, - format!("non-expression macro in expression position: {}", - extnamestr.get().index(&FullRange) - ).index(&FullRange)); + &format!("non-expression macro in expression position: {}", + &extnamestr.get()[] + )[]); return None; } }; @@ -333,8 +333,8 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, _ => { fld.cx.span_err( pth.span, - format!("'{}' is not a tt-style macro", - extnamestr.get()).index(&FullRange)); + &format!("'{}' is not a tt-style macro", + extnamestr.get())[]); None } } @@ -439,7 +439,7 @@ pub fn expand_item(it: P, fld: &mut MacroExpander) if valid_ident { fld.cx.mod_push(it.ident); } - let macro_use = contains_macro_use(fld, new_attrs.index(&FullRange)); + let macro_use = contains_macro_use(fld, &new_attrs[]); let result = with_exts_frame!(fld.cx.syntax_env, macro_use, noop_fold_item(it, fld)); @@ -565,8 +565,8 @@ pub fn expand_item_mac(it: P, let expanded = match fld.cx.syntax_env.find(&extname.name) { None => { fld.cx.span_err(path_span, - format!("macro undefined: '{}!'", - extnamestr).index(&FullRange)); + &format!("macro undefined: '{}!'", + extnamestr)[]); // let compilation continue return SmallVector::zero(); } @@ -576,10 +576,10 @@ pub fn expand_item_mac(it: P, if it.ident.name != parse::token::special_idents::invalid.name { fld.cx .span_err(path_span, - format!("macro {}! expects no ident argument, \ + &format!("macro {}! expects no ident argument, \ given '{}'", extnamestr, - token::get_ident(it.ident)).index(&FullRange)); + token::get_ident(it.ident))[]); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -591,14 +591,14 @@ pub fn expand_item_mac(it: P, } }); // mark before expansion: - let marked_before = mark_tts(tts.index(&FullRange), fm); - expander.expand(fld.cx, it.span, marked_before.index(&FullRange)) + let marked_before = mark_tts(&tts[], fm); + expander.expand(fld.cx, it.span, &marked_before[]) } IdentTT(ref expander, span) => { if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(path_span, - format!("macro {}! expects an ident argument", - extnamestr.get()).index(&FullRange)); + &format!("macro {}! expects an ident argument", + extnamestr.get())[]); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -610,14 +610,14 @@ pub fn expand_item_mac(it: P, } }); // mark before expansion: - let marked_tts = mark_tts(tts.index(&FullRange), fm); + let marked_tts = mark_tts(&tts[], fm); expander.expand(fld.cx, it.span, it.ident, marked_tts) } MacroRulesTT => { if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(path_span, - format!("macro_rules! expects an ident argument") - .index(&FullRange)); + &format!("macro_rules! expects an ident argument") + []); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -648,8 +648,8 @@ pub fn expand_item_mac(it: P, } _ => { fld.cx.span_err(it.span, - format!("{}! is not legal in item position", - extnamestr.get()).index(&FullRange)); + &format!("{}! 
is not legal in item position", + extnamestr.get())[]); return SmallVector::zero(); } } @@ -667,8 +667,8 @@ pub fn expand_item_mac(it: P, } None => { fld.cx.span_err(path_span, - format!("non-item macro in item position: {}", - extnamestr.get()).index(&FullRange)); + &format!("non-item macro in item position: {}", + extnamestr.get())[]); return SmallVector::zero(); } }; @@ -913,8 +913,8 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { let marked_after = match fld.cx.syntax_env.find(&extname.name) { None => { fld.cx.span_err(pth.span, - format!("macro undefined: '{}!'", - extnamestr).index(&FullRange)); + &format!("macro undefined: '{}!'", + extnamestr)[]); // let compilation continue return DummyResult::raw_pat(span); } @@ -931,19 +931,19 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { }); let fm = fresh_mark(); - let marked_before = mark_tts(tts.index(&FullRange), fm); + let marked_before = mark_tts(&tts[], fm); let mac_span = fld.cx.original_span(); let expanded = match expander.expand(fld.cx, mac_span, - marked_before.index(&FullRange)).make_pat() { + &marked_before[]).make_pat() { Some(e) => e, None => { fld.cx.span_err( pth.span, - format!( + &format!( "non-pattern macro in pattern position: {}", extnamestr.get() - ).index(&FullRange) + )[] ); return DummyResult::raw_pat(span); } @@ -954,8 +954,8 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { } _ => { fld.cx.span_err(span, - format!("{}! is not legal in pattern position", - extnamestr.get()).index(&FullRange)); + &format!("{}! is not legal in pattern position", + extnamestr.get())[]); return DummyResult::raw_pat(span); } } @@ -1232,7 +1232,7 @@ fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac { node: match node { MacInvocTT(path, tts, ctxt) => { MacInvocTT(self.fold_path(path), - self.fold_tts(tts.index(&FullRange)), + self.fold_tts(&tts[]), mtwt::apply_mark(self.mark, ctxt)) } }, @@ -1713,7 +1713,7 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) { let string = ident.get(); "xx" == string }).collect(); - let cxbinds: &[&ast::Ident] = cxbinds.index(&FullRange); + let cxbinds: &[&ast::Ident] = &cxbinds[]; let cxbind = match cxbinds { [b] => b, _ => panic!("expected just one binding for ext_cx") diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 44a596d2657..637b6d4649d 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -112,8 +112,8 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } _ => { ecx.span_err(p.span, - format!("expected ident for named argument, found `{}`", - p.this_token_to_string()).index(&FullRange)); + &format!("expected ident for named argument, found `{}`", + p.this_token_to_string())[]); return None; } }; @@ -125,8 +125,8 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) None => {} Some(prev) => { ecx.span_err(e.span, - format!("duplicate argument named `{}`", - name).index(&FullRange)); + &format!("duplicate argument named `{}`", + name)[]); ecx.parse_sess.span_diagnostic.span_note(prev.span, "previously here"); continue } @@ -217,7 +217,7 @@ fn verify_arg_type(&mut self, arg: Position, ty: ArgumentType) { let msg = format!("invalid reference to argument `{}` ({})", arg, self.describe_num_args()); - self.ecx.span_err(self.fmtsp, msg.index(&FullRange)); + self.ecx.span_err(self.fmtsp, &msg[]); return; } { @@ -237,7 +237,7 @@ fn verify_arg_type(&mut self, arg: Position, ty: ArgumentType) { Some(e) => e.span, None => { let msg = format!("there is no argument named `{}`", name); - 
self.ecx.span_err(self.fmtsp, msg.index(&FullRange)); + self.ecx.span_err(self.fmtsp, &msg[]); return; } }; @@ -277,22 +277,22 @@ fn verify_same(&self, match (cur, ty) { (&Known(ref cur), &Known(ref ty)) => { self.ecx.span_err(sp, - format!("argument redeclared with type `{}` when \ + &format!("argument redeclared with type `{}` when \ it was previously `{}`", *ty, - *cur).index(&FullRange)); + *cur)[]); } (&Known(ref cur), _) => { self.ecx.span_err(sp, - format!("argument used to format with `{}` was \ + &format!("argument used to format with `{}` was \ attempted to not be used for formatting", - *cur).index(&FullRange)); + *cur)[]); } (_, &Known(ref ty)) => { self.ecx.span_err(sp, - format!("argument previously used as a format \ + &format!("argument previously used as a format \ argument attempted to be used as `{}`", - *ty).index(&FullRange)); + *ty)[]); } (_, _) => { self.ecx.span_err(sp, "argument declared with multiple formats"); @@ -357,7 +357,7 @@ fn trans_count(&self, c: parse::Count) -> P { /// Translate the accumulated string literals to a literal expression fn trans_literal_string(&mut self) -> P { let sp = self.fmtsp; - let s = token::intern_and_get_ident(self.literal.index(&FullRange)); + let s = token::intern_and_get_ident(&self.literal[]); self.literal.clear(); self.ecx.expr_str(sp, s) } @@ -509,7 +509,7 @@ fn into_expr(mut self) -> P { None => continue // error already generated }; - let name = self.ecx.ident_of(format!("__arg{}", i).index(&FullRange)); + let name = self.ecx.ident_of(&format!("__arg{}", i)[]); pats.push(self.ecx.pat_ident(e.span, name)); locals.push(Context::format_arg(self.ecx, e.span, arg_ty, self.ecx.expr_ident(e.span, name))); @@ -525,8 +525,8 @@ fn into_expr(mut self) -> P { None => continue }; - let lname = self.ecx.ident_of(format!("__arg{}", - *name).index(&FullRange)); + let lname = self.ecx.ident_of(&format!("__arg{}", + *name)[]); pats.push(self.ecx.pat_ident(e.span, lname)); names[self.name_positions[*name]] = Some(Context::format_arg(self.ecx, e.span, arg_ty, @@ -606,7 +606,7 @@ fn format_arg(ecx: &ExtCtxt, sp: Span, -> P { let trait_ = match *ty { Known(ref tyname) => { - match tyname.index(&FullRange) { + match &tyname[] { "" => "String", "?" => "Show", "e" => "LowerExp", @@ -618,8 +618,8 @@ fn format_arg(ecx: &ExtCtxt, sp: Span, "X" => "UpperHex", _ => { ecx.span_err(sp, - format!("unknown format trait `{}`", - *tyname).index(&FullRange)); + &format!("unknown format trait `{}`", + *tyname)[]); "Dummy" } } @@ -709,8 +709,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, } } if !parser.errors.is_empty() { - cx.ecx.span_err(cx.fmtsp, format!("invalid format string: {}", - parser.errors.remove(0)).index(&FullRange)); + cx.ecx.span_err(cx.fmtsp, &format!("invalid format string: {}", + parser.errors.remove(0))[]); return DummyResult::raw_expr(sp); } if !cx.literal.is_empty() { diff --git a/src/libsyntax/ext/mtwt.rs b/src/libsyntax/ext/mtwt.rs index bebd803ac4f..ae8ff118fcc 100644 --- a/src/libsyntax/ext/mtwt.rs +++ b/src/libsyntax/ext/mtwt.rs @@ -223,7 +223,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name) -> Vec { } // the internal function for computing marks -// it's not clear to me whether it's better to use a .index(&FullRange) mutable +// it's not clear to me whether it's better to use a [] mutable // vector or a cons-list for this. 
fn marksof_internal(ctxt: SyntaxContext, stopname: Name, diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 77aea0c370a..2dbf29c145c 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -473,7 +473,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt, } fn ids_ext(strs: Vec ) -> Vec { - strs.iter().map(|str| str_to_ident((*str).index(&FullRange))).collect() + strs.iter().map(|str| str_to_ident(&(*str)[])).collect() } fn id_ext(str: &str) -> ast::Ident { @@ -675,7 +675,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec> { for i in range(0, tt.len()) { seq.push(tt.get_tt(i)); } - mk_tts(cx, seq.index(&FullRange)) + mk_tts(cx, &seq[]) } ast::TtToken(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); @@ -764,7 +764,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); let mut vector = vec!(stmt_let_sp, stmt_let_tt); - vector.extend(mk_tts(cx, tts.index(&FullRange)).into_iter()); + vector.extend(mk_tts(cx, &tts[]).into_iter()); let block = cx.expr_block( cx.block_all(sp, Vec::new(), diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 1ba91dd371c..b671b1a71b0 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -57,7 +57,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let topmost = cx.original_span_in_file(); let loc = cx.codemap().lookup_char_pos(topmost.lo); - let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange)); + let filename = token::intern_and_get_ident(&loc.file.name[]); base::MacExpr::new(cx.expr_str(topmost, filename)) } @@ -65,7 +65,7 @@ pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box { let s = pprust::tts_to_string(tts); base::MacExpr::new(cx.expr_str(sp, - token::intern_and_get_ident(s.index(&FullRange)))) + token::intern_and_get_ident(&s[]))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) @@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) .connect("::"); base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(string.index(&FullRange)))) + token::intern_and_get_ident(&string[]))) } /// include! 
: parse the given file as an expr @@ -135,9 +135,9 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let bytes = match File::open(&file).read_to_end() { Err(e) => { cx.span_err(sp, - format!("couldn't read {:?}: {}", + &format!("couldn't read {:?}: {}", file.display(), - e).index(&FullRange)); + e)[]); return DummyResult::expr(sp); } Ok(bytes) => bytes, @@ -147,15 +147,15 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // Add this input file to the code map to make it available as // dependency information let filename = format!("{:?}", file.display()); - let interned = token::intern_and_get_ident(src.index(&FullRange)); + let interned = token::intern_and_get_ident(&src[]); cx.codemap().new_filemap(filename, src); base::MacExpr::new(cx.expr_str(sp, interned)) } Err(_) => { cx.span_err(sp, - format!("{:?} wasn't a utf-8 file", - file.display()).index(&FullRange)); + &format!("{:?} wasn't a utf-8 file", + file.display())[]); return DummyResult::expr(sp); } } @@ -177,7 +177,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) match File::open(&file).read_to_end() { Err(e) => { cx.span_err(sp, - format!("couldn't read {:?}: {}", file.display(), e).index(&FullRange)); + &format!("couldn't read {:?}: {}", file.display(), e)[]); return DummyResult::expr(sp); } Ok(bytes) => { diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index d33d03bbfa9..9eda4bcef99 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -153,7 +153,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint { seq.num_captures } &TtDelimited(_, ref delim) => { - count_names(delim.tts.index(&FullRange)) + count_names(&delim.tts[]) } &TtToken(_, MatchNt(..)) => { 1 @@ -165,7 +165,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint { pub fn initial_matcher_pos(ms: Rc>, sep: Option, lo: BytePos) -> Box { - let match_idx_hi = count_names(ms.index(&FullRange)); + let match_idx_hi = count_names(&ms[]); let matches: Vec<_> = range(0, match_idx_hi).map(|_| Vec::new()).collect(); box MatcherPos { stack: vec![], @@ -228,8 +228,8 @@ fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc], let string = token::get_ident(bind_name); p_s.span_diagnostic .span_fatal(sp, - format!("duplicated bind name: {}", - string.get()).index(&FullRange)) + &format!("duplicated bind name: {}", + string.get())[]) } } } @@ -254,13 +254,13 @@ pub fn parse_or_else(sess: &ParseSess, rdr: TtReader, ms: Vec ) -> HashMap> { - match parse(sess, cfg, rdr, ms.index(&FullRange)) { + match parse(sess, cfg, rdr, &ms[]) { Success(m) => m, Failure(sp, str) => { - sess.span_diagnostic.span_fatal(sp, str.index(&FullRange)) + sess.span_diagnostic.span_fatal(sp, &str[]) } Error(sp, str) => { - sess.span_diagnostic.span_fatal(sp, str.index(&FullRange)) + sess.span_diagnostic.span_fatal(sp, &str[]) } } } @@ -447,7 +447,7 @@ pub fn parse(sess: &ParseSess, for dv in (&mut eof_eis[0]).matches.iter_mut() { v.push(dv.pop().unwrap()); } - return Success(nameize(sess, ms, v.index(&FullRange))); + return Success(nameize(sess, ms, &v[])); } else if eof_eis.len() > 1u { return Error(sp, "ambiguity: multiple successful parses".to_string()); } else { @@ -532,8 +532,8 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { token::Ident(sn,b) => { p.bump(); token::NtIdent(box sn,b) } _ => { let token_str = pprust::token_to_string(&p.token); - p.fatal((format!("expected ident, found {}", - 
token_str.index(&FullRange))).index(&FullRange)) + p.fatal(&format!("expected ident, found {}", + &token_str[])[]) } }, "path" => { @@ -541,7 +541,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { } "meta" => token::NtMeta(p.parse_meta_item()), _ => { - p.fatal(format!("unsupported builtin nonterminal parser: {}", name).index(&FullRange)) + p.fatal(&format!("unsupported builtin nonterminal parser: {}", name)[]) } } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 64c53e298ef..fc341e3bd85 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -52,7 +52,7 @@ fn ensure_complete_parse(&self, allow_semi: bool) { following", token_str); let span = parser.span; - parser.span_err(span, msg.index(&FullRange)); + parser.span_err(span, &msg[]); } } } @@ -126,8 +126,8 @@ fn expand<'cx>(&self, self.name, self.imported_from, arg, - self.lhses.index(&FullRange), - self.rhses.index(&FullRange)) + &self.lhses[], + &self.rhses[]) } } @@ -154,7 +154,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, match **lhs { MatchedNonterminal(NtTT(ref lhs_tt)) => { let lhs_tt = match **lhs_tt { - TtDelimited(_, ref delim) => delim.tts.index(&FullRange), + TtDelimited(_, ref delim) => &delim.tts[], _ => cx.span_fatal(sp, "malformed macro lhs") }; // `None` is because we're not interpolating @@ -195,13 +195,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, best_fail_spot = sp; best_fail_msg = (*msg).clone(); }, - Error(sp, ref msg) => cx.span_fatal(sp, msg.index(&FullRange)) + Error(sp, ref msg) => cx.span_fatal(sp, &msg[]) } } _ => cx.bug("non-matcher found in parsed lhses") } } - cx.span_fatal(best_fail_spot, best_fail_msg.index(&FullRange)); + cx.span_fatal(best_fail_spot, &best_fail_msg[]); } // Note that macro-by-example's input is also matched against a token tree: diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index bc07c7f6cae..94b8356130a 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -255,7 +255,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } LisContradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - r.sp_diag.span_fatal(sp.clone(), msg.index(&FullRange)); + r.sp_diag.span_fatal(sp.clone(), &msg[]); } LisConstraint(len, _) => { if len == 0 { @@ -308,8 +308,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { MatchedSeq(..) 
=> { r.sp_diag.span_fatal( r.cur_span, /* blame the macro writer */ - format!("variable '{:?}' is still repeating at this depth", - token::get_ident(ident)).index(&FullRange)); + &format!("variable '{:?}' is still repeating at this depth", + token::get_ident(ident))[]); } } } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index f10113254de..d8b6cc535fc 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -150,9 +150,9 @@ impl<'a> Context<'a> { fn gate_feature(&self, feature: &str, span: Span, explain: &str) { if !self.has_feature(feature) { self.span_handler.span_err(span, explain); - self.span_handler.span_help(span, format!("add #![feature({})] to the \ + self.span_handler.span_help(span, &format!("add #![feature({})] to the \ crate attributes to enable", - feature).index(&FullRange)); + feature)[]); } } @@ -243,7 +243,7 @@ fn visit_item(&mut self, i: &ast::Item) { } match i.node { ast::ItemForeignMod(ref foreign_module) => { - if attr::contains_name(i.attrs.index(&FullRange), "link_args") { + if attr::contains_name(&i.attrs[], "link_args") { self.gate_feature("link_args", i.span, "the `link_args` attribute is not portable \ across platforms, it is recommended to \ @@ -257,14 +257,14 @@ fn visit_item(&mut self, i: &ast::Item) { } ast::ItemFn(..) => { - if attr::contains_name(i.attrs.index(&FullRange), "plugin_registrar") { + if attr::contains_name(&i.attrs[], "plugin_registrar") { self.gate_feature("plugin_registrar", i.span, "compiler plugins are experimental and possibly buggy"); } } ast::ItemStruct(..) => { - if attr::contains_name(i.attrs.index(&FullRange), "simd") { + if attr::contains_name(&i.attrs[], "simd") { self.gate_feature("simd", i.span, "SIMD types are experimental and possibly buggy"); } @@ -290,7 +290,7 @@ fn visit_item(&mut self, i: &ast::Item) { removed in the future"); } - if attr::contains_name(i.attrs.index(&FullRange), + if attr::contains_name(&i.attrs[], "old_orphan_check") { self.gate_feature( "old_orphan_check", @@ -298,7 +298,7 @@ fn visit_item(&mut self, i: &ast::Item) { "the new orphan check rules will eventually be strictly enforced"); } - if attr::contains_name(i.attrs.index(&FullRange), + if attr::contains_name(&i.attrs[], "old_impl_check") { self.gate_feature("old_impl_check", i.span, @@ -313,7 +313,7 @@ fn visit_item(&mut self, i: &ast::Item) { } fn visit_foreign_item(&mut self, i: &ast::ForeignItem) { - if attr::contains_name(i.attrs.index(&FullRange), "linkage") { + if attr::contains_name(&i.attrs[], "linkage") { self.gate_feature("linkage", i.span, "the `linkage` attribute is experimental \ and not portable across platforms") diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 4aad7f911db..54ec9c7b146 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -92,7 +92,7 @@ fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute { } _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `#`, found `{}`", token_str).index(&FullRange)); + self.fatal(&format!("expected `#`, found `{}`", token_str)[]); } }; diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index e7fc5aac9c7..16ade904be8 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -82,7 +82,7 @@ fn vertical_trim(lines: Vec ) -> Vec { while j > i && lines[j - 1].trim().is_empty() { j -= 1; } - return lines.index(&(i..j)).iter().map(|x| (*x).clone()).collect(); + return 
lines[i..j].iter().map(|x| (*x).clone()).collect(); } /// remove a "[ \t]*\*" block from each line, if possible @@ -116,7 +116,7 @@ fn horizontal_trim(lines: Vec ) -> Vec { if can_trim { lines.iter().map(|line| { - line.index(&((i + 1)..line.len())).to_string() + (&line[(i + 1)..line.len()]).to_string() }).collect() } else { lines @@ -127,12 +127,12 @@ fn horizontal_trim(lines: Vec ) -> Vec { static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"]; for prefix in ONLINERS.iter() { if comment.starts_with(*prefix) { - return comment.index(&(prefix.len()..)).to_string(); + return (&comment[prefix.len()..]).to_string(); } } if comment.starts_with("/*") { - let lines = comment.index(&(3u..(comment.len() - 2u))) + let lines = comment[3u..(comment.len() - 2u)] .lines_any() .map(|s| s.to_string()) .collect:: >(); @@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, let line = rdr.read_one_line_comment(); debug!("{}", line); // Doc comments are not put in comments. - if is_doc_comment(line.index(&FullRange)) { + if is_doc_comment(&line[]) { break; } lines.push(line); @@ -224,10 +224,10 @@ fn all_whitespace(s: &str, col: CharPos) -> Option { fn trim_whitespace_prefix_and_push_line(lines: &mut Vec , s: String, col: CharPos) { let len = s.len(); - let s1 = match all_whitespace(s.index(&FullRange), col) { + let s1 = match all_whitespace(&s[], col) { Some(col) => { if col < len { - s.index(&(col..len)).to_string() + (&s[col..len]).to_string() } else { "".to_string() } @@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader, rdr.bump(); rdr.bump(); } - if is_block_doc_comment(curr_line.index(&FullRange)) { + if is_block_doc_comment(&curr_line[]) { return } assert!(!curr_line.contains_char('\n')); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 153b18b8760..4cdafb36eec 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -196,7 +196,7 @@ fn fatal_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.fatal_span_(from_pos, to_pos, m.index(&FullRange)); + self.fatal_span_(from_pos, to_pos, &m[]); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -205,7 +205,7 @@ fn err_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.err_span_(from_pos, to_pos, m.index(&FullRange)); + self.err_span_(from_pos, to_pos, &m[]); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -214,8 +214,8 @@ fn fatal_span_verbose(&self, from_pos: BytePos, to_pos: BytePos, mut m: String) m.push_str(": "); let from = self.byte_offset(from_pos).to_uint(); let to = self.byte_offset(to_pos).to_uint(); - m.push_str(self.filemap.src.index(&(from..to))); - self.fatal_span_(from_pos, to_pos, m.index(&FullRange)); + m.push_str(&self.filemap.src[from..to]); + self.fatal_span_(from_pos, to_pos, &m[]); } /// Advance peek_tok and peek_span to refer to the next token, and @@ -301,7 +301,7 @@ fn translate_crlf_(rdr: &StringReader, start: BytePos, while i < s.len() { let str::CharRange { ch, next } = s.char_range_at(i); if ch == '\r' { - if j < i { buf.push_str(s.index(&(j..i))); } + if j < i { buf.push_str(&s[j..i]); } j = next; if next >= s.len() || s.char_at(next) != '\n' { let pos = start + BytePos(i as u32); @@ -311,7 +311,7 @@ fn 
translate_crlf_(rdr: &StringReader, start: BytePos, } i = next; } - if j < s.len() { buf.push_str(s.index(&(j..))); } + if j < s.len() { buf.push_str(&s[j..]); } buf } } @@ -556,7 +556,7 @@ fn scan_block_comment(&mut self) -> Option { self.translate_crlf(start_bpos, string, "bare CR not allowed in block doc-comment") } else { string.into_cow() }; - token::DocComment(token::intern(string.index(&FullRange))) + token::DocComment(token::intern(&string[])) } else { token::Comment }; @@ -1110,7 +1110,7 @@ fn next_token_inner(&mut self) -> token::Token { // expansion purposes. See #12512 for the gory details of why // this is necessary. let ident = self.with_str_from(start, |lifetime_name| { - str_to_ident(format!("'{}", lifetime_name).index(&FullRange)) + str_to_ident(&format!("'{}", lifetime_name)[]) }); // Conjure up a "keyword checking ident" to make sure that diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index d26b3af67bd..c42a6beea2d 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -253,19 +253,19 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) let bytes = match File::open(path).read_to_end() { Ok(bytes) => bytes, Err(e) => { - err(format!("couldn't read {:?}: {:?}", + err(&format!("couldn't read {:?}: {:?}", path.display(), - e).index(&FullRange)); + e)[]); unreachable!() } }; - match str::from_utf8(bytes.index(&FullRange)).ok() { + match str::from_utf8(&bytes[]).ok() { Some(s) => { return string_to_filemap(sess, s.to_string(), path.as_str().unwrap().to_string()) } None => { - err(format!("{:?} is not UTF-8 encoded", path.display()).index(&FullRange)) + err(&format!("{:?} is not UTF-8 encoded", path.display())[]) } } unreachable!() @@ -399,10 +399,10 @@ pub fn char_lit(lit: &str) -> (char, int) { } let msg = format!("lexer should have rejected a bad character escape {}", lit); - let msg2 = msg.index(&FullRange); + let msg2 = &msg[]; fn esc(len: uint, lit: &str) -> Option<(char, int)> { - num::from_str_radix(lit.index(&(2..len)), 16) + num::from_str_radix(&lit[2..len], 16) .and_then(char::from_u32) .map(|x| (x, len as int)) } @@ -410,7 +410,7 @@ fn esc(len: uint, lit: &str) -> Option<(char, int)> { let unicode_escape = |&: | -> Option<(char, int)> if lit.as_bytes()[2] == b'{' { let idx = lit.find('}').expect(msg2); - let subslice = lit.index(&(3..idx)); + let subslice = &lit[3..idx]; num::from_str_radix(subslice, 16) .and_then(char::from_u32) .map(|x| (x, subslice.chars().count() as int + 4)) @@ -472,7 +472,7 @@ fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharIndices<'a>>) { eat(&mut chars); } else { // otherwise, a normal escape - let (c, n) = char_lit(lit.index(&(i..))); + let (c, n) = char_lit(&lit[i..]); for _ in range(0, n - 1) { // we don't need to move past the first \ chars.next(); } @@ -535,7 +535,7 @@ pub fn raw_str_lit(lit: &str) -> String { fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { s.len() > 1 && first_chars.contains(&s.char_at(0)) && - s.index(&(1..)).chars().all(|c| '0' <= c && c <= '9') + s[1..].chars().all(|c| '0' <= c && c <= '9') } fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, @@ -548,7 +548,7 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { // if it looks like a width, lets try to be helpful. 
sd.span_err(sp, &*format!("illegal width `{}` for float literal, \ - valid widths are 32 and 64", suf.index(&(1..)))); + valid widths are 32 and 64", &suf[1..])); } else { sd.span_err(sp, &*format!("illegal suffix `{}` for float literal, \ valid suffixes are `f32` and `f64`", suf)); @@ -584,7 +584,7 @@ pub fn byte_lit(lit: &str) -> (u8, uint) { b'\'' => b'\'', b'0' => b'\0', _ => { - match ::std::num::from_str_radix::(lit.index(&(2..4)), 16) { + match ::std::num::from_str_radix::(&lit[2..4], 16) { Some(c) => if c > 0xFF { panic!(err(2)) @@ -634,7 +634,7 @@ fn eat<'a, I: Iterator>(it: &mut iter::Peekable<(uint, u8), I>) } _ => { // otherwise, a normal escape - let (c, n) = byte_lit(lit.index(&(i..))); + let (c, n) = byte_lit(&lit[i..]); // we don't need to move past the first \ for _ in range(0, n - 1) { chars.next(); @@ -663,7 +663,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::(); - let mut s = s2.index(&FullRange); + let mut s = &s2[]; debug!("integer_lit: {}, {:?}", s, suffix); @@ -696,7 +696,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> } if base != 10 { - s = s.index(&(2..)); + s = &s[2..]; } if let Some(suf) = suffix { @@ -720,7 +720,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> if looks_like_width_suffix(&['i', 'u'], suf) { sd.span_err(sp, &*format!("illegal width `{}` for integer literal; \ valid widths are 8, 16, 32 and 64", - suf.index(&(1..)))); + &suf[1..])); } else { sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf)); } @@ -818,7 +818,7 @@ fn sp(a: u32, b: u32) -> Span { #[test] fn string_to_tts_macro () { let tts = string_to_tts("macro_rules! 
zip (($a)=>($a))".to_string()); - let tts: &[ast::TokenTree] = tts.index(&FullRange); + let tts: &[ast::TokenTree] = &tts[]; match tts { [ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)), ast::TtToken(_, token::Not), @@ -826,19 +826,19 @@ fn string_to_tts_macro () { ast::TtDelimited(_, ref macro_delimed)] if name_macro_rules.as_str() == "macro_rules" && name_zip.as_str() == "zip" => { - match macro_delimed.tts.index(&FullRange) { + match ¯o_delimed.tts[] { [ast::TtDelimited(_, ref first_delimed), ast::TtToken(_, token::FatArrow), ast::TtDelimited(_, ref second_delimed)] if macro_delimed.delim == token::Paren => { - match first_delimed.tts.index(&FullRange) { + match &first_delimed.tts[] { [ast::TtToken(_, token::Dollar), ast::TtToken(_, token::Ident(name, token::Plain))] if first_delimed.delim == token::Paren && name.as_str() == "a" => {}, _ => panic!("value 3: {:?}", **first_delimed), } - match second_delimed.tts.index(&FullRange) { + match &second_delimed.tts[] { [ast::TtToken(_, token::Dollar), ast::TtToken(_, token::Ident(name, token::Plain))] if second_delimed.delim == token::Paren @@ -1116,24 +1116,24 @@ fn parser_done(p: Parser){ let use_s = "use foo::bar::baz;"; let vitem = string_to_view_item(use_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.index(&FullRange), use_s); + assert_eq!(&vitem_s[], use_s); let use_s = "use foo::bar as baz;"; let vitem = string_to_view_item(use_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.index(&FullRange), use_s); + assert_eq!(&vitem_s[], use_s); } #[test] fn parse_extern_crate() { let ex_s = "extern crate foo;"; let vitem = string_to_view_item(ex_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.index(&FullRange), ex_s); + assert_eq!(&vitem_s[], ex_s); let ex_s = "extern crate \"foo\" as bar;"; let vitem = string_to_view_item(ex_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.index(&FullRange), ex_s); + assert_eq!(&vitem_s[], ex_s); } fn get_spans_of_pat_idents(src: &str) -> Vec { @@ -1212,7 +1212,7 @@ fn wb() -> c_int { O_WRONLY as c_int } let docs = item.attrs.iter().filter(|a| a.name().get() == "doc") .map(|a| a.value_str().unwrap().get().to_string()).collect::>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; - assert_eq!(docs.index(&FullRange), b); + assert_eq!(&docs[], b); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap(); diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 23728c74ae8..e9e207e7dbc 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -127,13 +127,13 @@ fn report(&mut self, kind_str: &str, desc: &str) { self.span_err(sp, - format!("obsolete syntax: {}", kind_str).index(&FullRange)); + &format!("obsolete syntax: {}", kind_str)[]); if !self.obsolete_set.contains(&kind) { self.sess .span_diagnostic .handler() - .note(format!("{}", desc).index(&FullRange)); + .note(&format!("{}", desc)[]); self.obsolete_set.insert(kind); } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 9c16dbb2c5c..f777a1aef67 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -388,13 +388,13 @@ pub fn this_token_to_string(&mut self) -> String { pub fn unexpected_last(&mut self, t: &token::Token) -> ! 
{ let token_str = Parser::token_to_string(t); let last_span = self.last_span; - self.span_fatal(last_span, format!("unexpected token: `{}`", - token_str).index(&FullRange)); + self.span_fatal(last_span, &format!("unexpected token: `{}`", + token_str)[]); } pub fn unexpected(&mut self) -> ! { let this_token = self.this_token_to_string(); - self.fatal(format!("unexpected token: `{}`", this_token).index(&FullRange)); + self.fatal(&format!("unexpected token: `{}`", this_token)[]); } /// Expect and consume the token t. Signal an error if @@ -406,9 +406,9 @@ pub fn expect(&mut self, t: &token::Token) { } else { let token_str = Parser::token_to_string(t); let this_token_str = self.this_token_to_string(); - self.fatal(format!("expected `{}`, found `{}`", + self.fatal(&format!("expected `{}`, found `{}`", token_str, - this_token_str).index(&FullRange)) + this_token_str)[]) } } else { self.expect_one_of(slice::ref_slice(t), &[]); @@ -449,10 +449,10 @@ fn tokens_to_string(tokens: &[TokenType]) -> String { expected.push_all(&*self.expected_tokens); expected.sort_by(|a, b| a.to_string().cmp(&b.to_string())); expected.dedup(); - let expect = tokens_to_string(expected.index(&FullRange)); + let expect = tokens_to_string(&expected[]); let actual = self.this_token_to_string(); self.fatal( - (if expected.len() != 1 { + &(if expected.len() != 1 { (format!("expected one of {}, found `{}`", expect, actual)) @@ -460,7 +460,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String { (format!("expected {}, found `{}`", expect, actual)) - }).index(&FullRange) + }[]) ) } } @@ -493,7 +493,7 @@ pub fn commit_expr(&mut self, e: &Expr, edible: &[token::Token], inedible: &[tok // might be unit-struct construction; check for recoverableinput error. let mut expected = edible.iter().map(|x| x.clone()).collect::>(); expected.push_all(inedible); - self.check_for_erroneous_unit_struct_expecting(expected.index(&FullRange)); + self.check_for_erroneous_unit_struct_expecting(&expected[]); } self.expect_one_of(edible, inedible) } @@ -510,9 +510,9 @@ pub fn commit_stmt(&mut self, edible: &[token::Token], inedible: &[token::Token] .as_ref() .map_or(false, |t| t.is_ident() || t.is_path()) { let mut expected = edible.iter().map(|x| x.clone()).collect::>(); - expected.push_all(inedible.index(&FullRange)); + expected.push_all(&inedible[]); self.check_for_erroneous_unit_struct_expecting( - expected.index(&FullRange)); + &expected[]); } self.expect_one_of(edible, inedible) } @@ -534,8 +534,8 @@ pub fn parse_ident(&mut self) -> ast::Ident { } _ => { let token_str = self.this_token_to_string(); - self.fatal((format!("expected ident, found `{}`", - token_str)).index(&FullRange)) + self.fatal(&format!("expected ident, found `{}`", + token_str)[]) } } } @@ -592,8 +592,8 @@ pub fn expect_keyword(&mut self, kw: keywords::Keyword) { if !self.eat_keyword(kw) { let id_interned_str = token::get_name(kw.to_name()); let token_str = self.this_token_to_string(); - self.fatal(format!("expected `{}`, found `{}`", - id_interned_str, token_str).index(&FullRange)) + self.fatal(&format!("expected `{}`, found `{}`", + id_interned_str, token_str)[]) } } @@ -603,8 +603,8 @@ pub fn check_strict_keywords(&mut self) { let token_str = self.this_token_to_string(); let span = self.span; self.span_err(span, - format!("expected identifier, found keyword `{}`", - token_str).index(&FullRange)); + &format!("expected identifier, found keyword `{}`", + token_str)[]); } } @@ -612,8 +612,8 @@ pub fn check_strict_keywords(&mut self) { pub fn check_reserved_keywords(&mut self) { 
if self.token.is_reserved_keyword() { let token_str = self.this_token_to_string(); - self.fatal(format!("`{}` is a reserved keyword", - token_str).index(&FullRange)) + self.fatal(&format!("`{}` is a reserved keyword", + token_str)[]) } } @@ -631,9 +631,9 @@ fn expect_and(&mut self) { let token_str = self.this_token_to_string(); let found_token = Parser::token_to_string(&token::BinOp(token::And)); - self.fatal(format!("expected `{}`, found `{}`", + self.fatal(&format!("expected `{}`, found `{}`", found_token, - token_str).index(&FullRange)) + token_str)[]) } } } @@ -652,9 +652,9 @@ fn expect_or(&mut self) { let found_token = self.this_token_to_string(); let token_str = Parser::token_to_string(&token::BinOp(token::Or)); - self.fatal(format!("expected `{}`, found `{}`", + self.fatal(&format!("expected `{}`, found `{}`", token_str, - found_token).index(&FullRange)) + found_token)[]) } } } @@ -695,9 +695,9 @@ fn expect_lt(&mut self) { if !self.eat_lt() { let found_token = self.this_token_to_string(); let token_str = Parser::token_to_string(&token::Lt); - self.fatal(format!("expected `{}`, found `{}`", + self.fatal(&format!("expected `{}`, found `{}`", token_str, - found_token).index(&FullRange)) + found_token)[]) } } @@ -747,9 +747,9 @@ pub fn expect_gt(&mut self) { _ => { let gt_str = Parser::token_to_string(&token::Gt); let this_token_str = self.this_token_to_string(); - self.fatal(format!("expected `{}`, found `{}`", + self.fatal(&format!("expected `{}`, found `{}`", gt_str, - this_token_str).index(&FullRange)) + this_token_str)[]) } } } @@ -1371,7 +1371,7 @@ pub fn parse_trait_items(&mut self) -> Vec { let (inner_attrs, body) = p.parse_inner_attrs_and_block(); let mut attrs = attrs; - attrs.push_all(inner_attrs.index(&FullRange)); + attrs.push_all(&inner_attrs[]); ProvidedMethod(P(ast::Method { attrs: attrs, id: ast::DUMMY_NODE_ID, @@ -1389,8 +1389,8 @@ pub fn parse_trait_items(&mut self) -> Vec { _ => { let token_str = p.this_token_to_string(); - p.fatal((format!("expected `;` or `{{`, found `{}`", - token_str)).index(&FullRange)) + p.fatal(&format!("expected `;` or `{{`, found `{}`", + token_str)[]) } } } @@ -1586,7 +1586,7 @@ pub fn parse_ty(&mut self) -> P { } else { let this_token_str = self.this_token_to_string(); let msg = format!("expected type, found `{}`", this_token_str); - self.fatal(msg.index(&FullRange)); + self.fatal(&msg[]); }; let sp = mk_sp(lo, self.last_span.hi); @@ -1734,8 +1734,7 @@ pub fn lit_from_token(&mut self, tok: &token::Token) -> Lit_ { token::StrRaw(s, n) => { (true, LitStr( - token::intern_and_get_ident( - parse::raw_str_lit(s.as_str()).index(&FullRange)), + token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())[]), ast::RawStr(n))) } token::Binary(i) => @@ -1979,7 +1978,7 @@ pub fn parse_lifetime(&mut self) -> ast::Lifetime { }; } _ => { - self.fatal(format!("expected a lifetime name").index(&FullRange)); + self.fatal(&format!("expected a lifetime name")[]); } } } @@ -2017,7 +2016,7 @@ pub fn parse_lifetime_defs(&mut self) -> Vec { let msg = format!("expected `,` or `>` after lifetime \ name, found `{}`", this_token_str); - self.fatal(msg.index(&FullRange)); + self.fatal(&msg[]); } } } @@ -2501,16 +2500,16 @@ pub fn parse_dot_or_call_expr_with(&mut self, e0: P) -> P { let last_span = self.last_span; let fstr = n.as_str(); self.span_err(last_span, - format!("unexpected token: `{}`", n.as_str()).index(&FullRange)); + &format!("unexpected token: `{}`", n.as_str())[]); if fstr.chars().all(|x| "0123456789.".contains_char(x)) { let float = match fstr.parse::() 
{ Some(f) => f, None => continue, }; self.span_help(last_span, - format!("try parenthesizing the first index; e.g., `(foo.{}){}`", + &format!("try parenthesizing the first index; e.g., `(foo.{}){}`", float.trunc() as uint, - float.fract().to_string().index(&(1..))).index(&FullRange)); + &float.fract().to_string()[1..])[]); } self.abort_if_errors(); @@ -2655,8 +2654,8 @@ pub fn check_unknown_macro_variable(&mut self) { if self.quote_depth == 0u { match self.token { token::SubstNt(name, _) => - self.fatal(format!("unknown macro variable `{}`", - token::get_ident(name)).index(&FullRange)), + self.fatal(&format!("unknown macro variable `{}`", + token::get_ident(name))[]), _ => {} } } @@ -2717,8 +2716,8 @@ fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree { Some(&sp) => p.span_note(sp, "unclosed delimiter"), }; let token_str = p.this_token_to_string(); - p.fatal(format!("incorrect close delimiter: `{}`", - token_str).index(&FullRange)) + p.fatal(&format!("incorrect close delimiter: `{}`", + token_str)[]) }, /* we ought to allow different depths of unquotation */ token::Dollar | token::SubstNt(..) if p.quote_depth > 0u => { @@ -2858,8 +2857,8 @@ pub fn parse_prefix_expr(&mut self) -> P { let span = self.span; let this_token_to_string = self.this_token_to_string(); self.span_err(span, - format!("expected expression, found `{}`", - this_token_to_string).index(&FullRange)); + &format!("expected expression, found `{}`", + this_token_to_string)[]); let box_span = mk_sp(lo, self.last_span.hi); self.span_help(box_span, "perhaps you meant `box() (foo)` instead?"); @@ -3241,8 +3240,8 @@ fn parse_pat_fields(&mut self) -> (Vec> , bool) self.bump(); if self.token != token::CloseDelim(token::Brace) { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `{}`, found `{}`", "}", - token_str).index(&FullRange)) + self.fatal(&format!("expected `{}`, found `{}`", "}", + token_str)[]) } etc = true; break; @@ -3262,8 +3261,8 @@ fn parse_pat_fields(&mut self) -> (Vec> , bool) match bind_type { BindByRef(..) | BindByValue(MutMutable) => { let token_str = self.this_token_to_string(); - self.fatal(format!("unexpected `{}`", - token_str).index(&FullRange)) + self.fatal(&format!("unexpected `{}`", + token_str)[]) } _ => {} } @@ -3546,7 +3545,7 @@ fn parse_pat_ident(&mut self, let span = self.span; let tok_str = self.this_token_to_string(); self.span_fatal(span, - format!("expected identifier, found `{}`", tok_str).index(&FullRange)); + &format!("expected identifier, found `{}`", tok_str)[]); } let ident = self.parse_ident(); let last_span = self.last_span; @@ -3643,7 +3642,7 @@ fn check_expected_item(p: &mut Parser, attrs: &[Attribute]) { let lo = self.span.lo; if self.token.is_keyword(keywords::Let) { - check_expected_item(self, item_attrs.index(&FullRange)); + check_expected_item(self, &item_attrs[]); self.expect_keyword(keywords::Let); let decl = self.parse_let(); P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID))) @@ -3652,7 +3651,7 @@ fn check_expected_item(p: &mut Parser, attrs: &[Attribute]) { && self.look_ahead(1, |t| *t == token::Not) { // it's a macro invocation: - check_expected_item(self, item_attrs.index(&FullRange)); + check_expected_item(self, &item_attrs[]); // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... 
@@ -3678,9 +3677,9 @@ fn check_expected_item(p: &mut Parser, attrs: &[Attribute]) { "" }; let tok_str = self.this_token_to_string(); - self.fatal(format!("expected {}`(` or `{{`, found `{}`", + self.fatal(&format!("expected {}`(` or `{{`, found `{}`", ident_str, - tok_str).index(&FullRange)) + tok_str)[]) }, }; @@ -3728,7 +3727,7 @@ fn check_expected_item(p: &mut Parser, attrs: &[Attribute]) { } } else { let found_attrs = !item_attrs.is_empty(); - let item_err = Parser::expected_item_err(item_attrs.index(&FullRange)); + let item_err = Parser::expected_item_err(&item_attrs[]); match self.parse_item_or_view_item(item_attrs, false) { IoviItem(i) => { let hi = i.span.hi; @@ -3772,7 +3771,7 @@ pub fn parse_block(&mut self) -> P { let sp = self.span; let tok = self.this_token_to_string(); self.span_fatal_help(sp, - format!("expected `{{`, found `{}`", tok).index(&FullRange), + &format!("expected `{{`, found `{}`", tok)[], "place this code inside a block"); } @@ -3826,13 +3825,13 @@ fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode, while self.token != token::CloseDelim(token::Brace) { // parsing items even when they're not allowed lets us give // better error messages and recover more gracefully. - attributes_box.push_all(self.parse_outer_attributes().index(&FullRange)); + attributes_box.push_all(&self.parse_outer_attributes()[]); match self.token { token::Semi => { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attributes_box.index(&FullRange))); + Parser::expected_item_err(&attributes_box[])); attributes_box = Vec::new(); } self.bump(); // empty @@ -3924,7 +3923,7 @@ fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode, if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attributes_box.index(&FullRange))); + Parser::expected_item_err(&attributes_box[])); } let hi = self.span.hi; @@ -4367,8 +4366,8 @@ fn expect_self_ident(&mut self) -> ast::Ident { }, _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `self`, found `{}`", - token_str).index(&FullRange)) + self.fatal(&format!("expected `self`, found `{}`", + token_str)[]) } } } @@ -4521,8 +4520,8 @@ macro_rules! 
parse_remaining_arguments { } _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `,` or `)`, found `{}`", - token_str).index(&FullRange)) + self.fatal(&format!("expected `,` or `)`, found `{}`", + token_str)[]) } } } @@ -4698,7 +4697,7 @@ pub fn parse_method(&mut self, let (inner_attrs, body) = self.parse_inner_attrs_and_block(); let body_span = body.span; let mut new_attrs = attrs; - new_attrs.push_all(inner_attrs.index(&FullRange)); + new_attrs.push_all(&inner_attrs[]); (ast::MethDecl(ident, generics, abi, @@ -4915,17 +4914,17 @@ pub fn parse_record_struct_body(&mut self, class_name: &ast::Ident) -> Vec;` } else { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `where`, `{}`, `(`, or `;` after struct \ - name, found `{}`", "{", token_str).index(&FullRange)); + self.fatal(&format!("expected `where`, `{}`, `(`, or `;` after struct \ + name, found `{}`", "{", token_str)[]); } } @@ -4990,8 +4989,8 @@ pub fn parse_single_struct_field(&mut self, let span = self.span; let token_str = self.this_token_to_string(); self.span_fatal_help(span, - format!("expected `,`, or `}}`, found `{}`", - token_str).index(&FullRange), + &format!("expected `,`, or `}}`, found `{}`", + token_str)[], "struct fields should be separated by commas") } } @@ -5078,7 +5077,7 @@ fn parse_mod_items(&mut self, let mut attrs = self.parse_outer_attributes(); if first { let mut tmp = attrs_remaining.clone(); - tmp.push_all(attrs.index(&FullRange)); + tmp.push_all(&attrs[]); attrs = tmp; first = false; } @@ -5094,8 +5093,8 @@ fn parse_mod_items(&mut self, } _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected item, found `{}`", - token_str).index(&FullRange)) + self.fatal(&format!("expected item, found `{}`", + token_str)[]) } } } @@ -5104,7 +5103,7 @@ fn parse_mod_items(&mut self, // We parsed attributes for the first item but didn't find it let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attrs_remaining.index(&FullRange))); + Parser::expected_item_err(&attrs_remaining[])); } ast::Mod { @@ -5174,7 +5173,7 @@ fn eval_src_mod(&mut self, -> (ast::Item_, Vec ) { let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span)); prefix.pop(); - let mod_path = Path::new(".").join_many(self.mod_path_stack.index(&FullRange)); + let mod_path = Path::new(".").join_many(&self.mod_path_stack[]); let dir_path = prefix.join(&mod_path); let mod_string = token::get_ident(id); let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name( @@ -5184,8 +5183,8 @@ fn eval_src_mod(&mut self, let mod_name = mod_string.get().to_string(); let default_path_str = format!("{}.rs", mod_name); let secondary_path_str = format!("{}/mod.rs", mod_name); - let default_path = dir_path.join(default_path_str.index(&FullRange)); - let secondary_path = dir_path.join(secondary_path_str.index(&FullRange)); + let default_path = dir_path.join(&default_path_str[]); + let secondary_path = dir_path.join(&secondary_path_str[]); let default_exists = default_path.exists(); let secondary_exists = secondary_path.exists(); @@ -5197,16 +5196,16 @@ fn eval_src_mod(&mut self, None => self.root_module_name.as_ref().unwrap().clone(), }; self.span_note(id_sp, - format!("maybe move this module `{0}` \ + &format!("maybe move this module `{0}` \ to its own directory via \ `{0}/mod.rs`", - this_module).index(&FullRange)); + this_module)[]); if default_exists || secondary_exists { self.span_note(id_sp, - format!("... 
or maybe `use` the module \ + &format!("... or maybe `use` the module \ `{}` instead of possibly \ redeclaring it", - mod_name).index(&FullRange)); + mod_name)[]); } self.abort_if_errors(); } @@ -5216,22 +5215,22 @@ fn eval_src_mod(&mut self, (false, true) => (secondary_path, true), (false, false) => { self.span_fatal_help(id_sp, - format!("file not found for module `{}`", - mod_name).index(&FullRange), - format!("name the file either {} or {} inside \ + &format!("file not found for module `{}`", + mod_name)[], + &format!("name the file either {} or {} inside \ the directory {:?}", default_path_str, secondary_path_str, - dir_path.display()).index(&FullRange)); + dir_path.display())[]); } (true, true) => { self.span_fatal_help( id_sp, - format!("file for module `{}` found at both {} \ + &format!("file for module `{}` found at both {} \ and {}", mod_name, default_path_str, - secondary_path_str).index(&FullRange), + secondary_path_str)[], "delete or rename one of them to remove the ambiguity"); } } @@ -5253,11 +5252,11 @@ fn eval_src_mod_from_path(&mut self, let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); for p in included_mod_stack.slice(i, len).iter() { - err.push_str(p.display().as_cow().index(&FullRange)); + err.push_str(&p.display().as_cow()[]); err.push_str(" -> "); } - err.push_str(path.display().as_cow().index(&FullRange)); - self.span_fatal(id_sp, err.index(&FullRange)); + err.push_str(&path.display().as_cow()[]); + self.span_fatal(id_sp, &err[]); } None => () } @@ -5338,7 +5337,7 @@ fn parse_foreign_mod_items(&mut self, if !attrs_remaining.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attrs_remaining.index(&FullRange))); + Parser::expected_item_err(&attrs_remaining[])); } assert!(self.token == token::CloseDelim(token::Brace)); ast::ForeignMod { @@ -5377,9 +5376,9 @@ fn parse_item_extern_crate(&mut self, self.span_err(span, "expected `;`, found `as`"); self.span_help(span, - format!("perhaps you meant to enclose the crate name `{}` in \ + &format!("perhaps you meant to enclose the crate name `{}` in \ a string?", - the_ident.as_str()).index(&FullRange)); + the_ident.as_str())[]); None } else { None @@ -5403,9 +5402,9 @@ fn parse_item_extern_crate(&mut self, let span = self.span; let token_str = self.this_token_to_string(); self.span_fatal(span, - format!("expected extern crate name but \ + &format!("expected extern crate name but \ found `{}`", - token_str).index(&FullRange)); + token_str)[]); } }; @@ -5501,9 +5500,9 @@ fn parse_enum_def(&mut self, _generics: &ast::Generics) -> EnumDef { let struct_def = self.parse_struct_def(); if struct_def.fields.len() == 0 { self.span_err(start_span, - format!("unit-like struct variant should be written \ + &format!("unit-like struct variant should be written \ without braces, as `{},`", - token::get_ident(ident)).index(&FullRange)); + token::get_ident(ident))[]); } kind = StructVariantKind(struct_def); } else if self.check(&token::OpenDelim(token::Paren)) { @@ -5585,10 +5584,10 @@ fn parse_opt_abi(&mut self) -> Option { let last_span = self.last_span; self.span_err( last_span, - format!("illegal ABI: expected one of [{}], \ + &format!("illegal ABI: expected one of [{}], \ found `{}`", abi::all_names().connect(", "), - the_string).index(&FullRange)); + the_string)[]); None } } @@ -5647,10 +5646,10 @@ fn parse_item_or_view_item(&mut self, if next_is_mod { let last_span = self.last_span; self.span_err(mk_sp(lo, last_span.hi), - format!("`extern mod` is 
obsolete, use \ + &format!("`extern mod` is obsolete, use \ `extern crate` instead \ to refer to external \ - crates.").index(&FullRange)) + crates.")[]) } return self.parse_item_extern_crate(lo, visibility, attrs); } @@ -5677,8 +5676,8 @@ fn parse_item_or_view_item(&mut self, let span = self.span; let token_str = self.this_token_to_string(); self.span_fatal(span, - format!("expected `{}` or `fn`, found `{}`", "{", - token_str).index(&FullRange)); + &format!("expected `{}` or `fn`, found `{}`", "{", + token_str)[]); } if self.eat_keyword(keywords::Virtual) { @@ -5791,7 +5790,7 @@ fn parse_item_or_view_item(&mut self, if self.eat_keyword(keywords::Mod) { // MODULE ITEM let (ident, item_, extra_attrs) = - self.parse_item_mod(attrs.index(&FullRange)); + self.parse_item_mod(&attrs[]); let last_span = self.last_span; let item = self.mk_item(lo, last_span.hi, @@ -6131,7 +6130,7 @@ fn parse_items_and_view_items(&mut self, macros_allowed: bool) -> ParsedItemsAndViewItems { let mut attrs = first_item_attrs; - attrs.push_all(self.parse_outer_attributes().index(&FullRange)); + attrs.push_all(&self.parse_outer_attributes()[]); // First, parse view items. let mut view_items : Vec = Vec::new(); let mut items = Vec::new(); @@ -6213,7 +6212,7 @@ fn parse_foreign_items(&mut self, first_item_attrs: Vec , macros_allowed: bool) -> ParsedItemsAndViewItems { let mut attrs = first_item_attrs; - attrs.push_all(self.parse_outer_attributes().index(&FullRange)); + attrs.push_all(&self.parse_outer_attributes()[]); let mut foreign_items = Vec::new(); loop { match self.parse_foreign_item(attrs, macros_allowed) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 43786738910..4b3573f84c5 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -480,7 +480,7 @@ fn mk_fresh_ident_interner() -> IdentInterner { $(init_vec.push($si_str);)* $(init_vec.push($sk_str);)* $(init_vec.push($rk_str);)* - interner::StrInterner::prefill(init_vec.index(&FullRange)) + interner::StrInterner::prefill(&init_vec[]) } }} @@ -629,7 +629,7 @@ fn new_from_rc_str(string: RcStr) -> InternedString { #[inline] pub fn get<'a>(&'a self) -> &'a str { - self.string.index(&FullRange) + &self.string[] } } @@ -659,41 +659,41 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { impl fmt::String for InternedString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.string.index(&FullRange)) + write!(f, "{}", &self.string[]) } } impl<'a> PartialEq<&'a str> for InternedString { #[inline(always)] fn eq(&self, other: & &'a str) -> bool { - PartialEq::eq(self.string.index(&FullRange), *other) + PartialEq::eq(&self.string[], *other) } #[inline(always)] fn ne(&self, other: & &'a str) -> bool { - PartialEq::ne(self.string.index(&FullRange), *other) + PartialEq::ne(&self.string[], *other) } } impl<'a> PartialEq for &'a str { #[inline(always)] fn eq(&self, other: &InternedString) -> bool { - PartialEq::eq(*self, other.string.index(&FullRange)) + PartialEq::eq(*self, &other.string[]) } #[inline(always)] fn ne(&self, other: &InternedString) -> bool { - PartialEq::ne(*self, other.string.index(&FullRange)) + PartialEq::ne(*self, &other.string[]) } } impl Decodable for InternedString { fn decode(d: &mut D) -> Result { - Ok(get_name(get_ident_interner().intern(try!(d.read_str()).index(&FullRange)))) + Ok(get_name(get_ident_interner().intern(&try!(d.read_str())[]))) } } impl Encodable for InternedString { fn encode(&self, s: &mut S) -> Result<(), S::Error> { - 
s.emit_str(self.string.index(&FullRange)) + s.emit_str(&self.string[]) } } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 52306075c21..b69b812c958 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -138,9 +138,9 @@ pub fn buf_str(toks: Vec, if i != left { s.push_str(", "); } - s.push_str(format!("{}={}", + s.push_str(&format!("{}={}", szs[i], - tok_str(toks[i].clone())).index(&FullRange)); + tok_str(toks[i].clone()))[]); i += 1u; i %= n; } @@ -602,7 +602,7 @@ pub fn print(&mut self, x: Token, l: int) -> io::IoResult<()> { assert_eq!(l, len); // assert!(l <= space); self.space -= len; - self.print_str(s.index(&FullRange)) + self.print_str(&s[]) } Eof => { // Eof should never get here. diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 87dcc9e70f4..9b6f8e6002d 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -114,7 +114,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap, out, ann, is_expanded); - try!(s.print_mod(&krate.module, krate.attrs.index(&FullRange))); + try!(s.print_mod(&krate.module, &krate.attrs[])); try!(s.print_remaining_comments()); eof(&mut s.s) } @@ -580,7 +580,7 @@ pub fn break_offset_if_not_bol(&mut self, n: uint, pub fn synth_comment(&mut self, text: String) -> IoResult<()> { try!(word(&mut self.s, "/*")); try!(space(&mut self.s)); - try!(word(&mut self.s, text.index(&FullRange))); + try!(word(&mut self.s, &text[])); try!(space(&mut self.s)); word(&mut self.s, "*/") } @@ -685,7 +685,7 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> IoResult<()> { } ast::TyTup(ref elts) => { try!(self.popen()); - try!(self.commasep(Inconsistent, elts.index(&FullRange), + try!(self.commasep(Inconsistent, &elts[], |s, ty| s.print_type(&**ty))); if elts.len() == 1 { try!(word(&mut self.s, ",")); @@ -721,10 +721,10 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> IoResult<()> { } ast::TyObjectSum(ref ty, ref bounds) => { try!(self.print_type(&**ty)); - try!(self.print_bounds("+", bounds.index(&FullRange))); + try!(self.print_bounds("+", &bounds[])); } ast::TyPolyTraitRef(ref bounds) => { - try!(self.print_bounds("", bounds.index(&FullRange))); + try!(self.print_bounds("", &bounds[])); } ast::TyQPath(ref qpath) => { try!(word(&mut self.s, "<")); @@ -759,7 +759,7 @@ pub fn print_foreign_item(&mut self, item: &ast::ForeignItem) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(item.attrs.index(&FullRange))); + try!(self.print_outer_attributes(&item.attrs[])); match item.node { ast::ForeignItemFn(ref decl, ref generics) => { try!(self.print_fn(&**decl, None, abi::Rust, item.ident, generics, @@ -769,8 +769,8 @@ pub fn print_foreign_item(&mut self, self.end() // end the outer fn box } ast::ForeignItemStatic(ref t, m) => { - try!(self.head(visibility_qualified(item.vis, - "static").index(&FullRange))); + try!(self.head(&visibility_qualified(item.vis, + "static")[])); if m { try!(self.word_space("mut")); } @@ -787,7 +787,7 @@ pub fn print_foreign_item(&mut self, fn print_associated_type(&mut self, typedef: &ast::AssociatedType) -> IoResult<()> { - try!(self.print_outer_attributes(typedef.attrs.index(&FullRange))); + try!(self.print_outer_attributes(&typedef.attrs[])); try!(self.word_space("type")); try!(self.print_ty_param(&typedef.ty_param)); word(&mut self.s, ";") @@ -806,12 +806,12 @@ fn print_typedef(&mut self, typedef: &ast::Typedef) -> IoResult<()> { pub fn print_item(&mut self, item: &ast::Item) -> 
IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(item.attrs.index(&FullRange))); + try!(self.print_outer_attributes(&item.attrs[])); try!(self.ann.pre(self, NodeItem(item))); match item.node { ast::ItemStatic(ref ty, m, ref expr) => { - try!(self.head(visibility_qualified(item.vis, - "static").index(&FullRange))); + try!(self.head(&visibility_qualified(item.vis, + "static")[])); if m == ast::MutMutable { try!(self.word_space("mut")); } @@ -827,8 +827,8 @@ pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> { try!(self.end()); // end the outer cbox } ast::ItemConst(ref ty, ref expr) => { - try!(self.head(visibility_qualified(item.vis, - "const").index(&FullRange))); + try!(self.head(&visibility_qualified(item.vis, + "const")[])); try!(self.print_ident(item.ident)); try!(self.word_space(":")); try!(self.print_type(&**ty)); @@ -851,29 +851,28 @@ pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> { item.vis )); try!(word(&mut self.s, " ")); - try!(self.print_block_with_attrs(&**body, item.attrs.index(&FullRange))); + try!(self.print_block_with_attrs(&**body, &item.attrs[])); } ast::ItemMod(ref _mod) => { - try!(self.head(visibility_qualified(item.vis, - "mod").index(&FullRange))); + try!(self.head(&visibility_qualified(item.vis, + "mod")[])); try!(self.print_ident(item.ident)); try!(self.nbsp()); try!(self.bopen()); - try!(self.print_mod(_mod, item.attrs.index(&FullRange))); + try!(self.print_mod(_mod, &item.attrs[])); try!(self.bclose(item.span)); } ast::ItemForeignMod(ref nmod) => { try!(self.head("extern")); - try!(self.word_nbsp(nmod.abi.to_string().index(&FullRange))); + try!(self.word_nbsp(&nmod.abi.to_string()[])); try!(self.bopen()); - try!(self.print_foreign_mod(nmod, item.attrs.index(&FullRange))); + try!(self.print_foreign_mod(nmod, &item.attrs[])); try!(self.bclose(item.span)); } ast::ItemTy(ref ty, ref params) => { try!(self.ibox(indent_unit)); try!(self.ibox(0u)); - try!(self.word_nbsp(visibility_qualified(item.vis, - "type").index(&FullRange))); + try!(self.word_nbsp(&visibility_qualified(item.vis, "type")[])); try!(self.print_ident(item.ident)); try!(self.print_generics(params)); try!(self.end()); // end the inner ibox @@ -895,7 +894,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> { )); } ast::ItemStruct(ref struct_def, ref generics) => { - try!(self.head(visibility_qualified(item.vis,"struct").index(&FullRange))); + try!(self.head(&visibility_qualified(item.vis,"struct")[])); try!(self.print_struct(&**struct_def, generics, item.ident, item.span)); } @@ -936,7 +935,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> { try!(space(&mut self.s)); try!(self.bopen()); - try!(self.print_inner_attributes(item.attrs.index(&FullRange))); + try!(self.print_inner_attributes(&item.attrs[])); for impl_item in impl_items.iter() { match *impl_item { ast::MethodImplItem(ref meth) => { @@ -967,7 +966,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> { real_bounds.push(b); } } - try!(self.print_bounds(":", real_bounds.index(&FullRange))); + try!(self.print_bounds(":", &real_bounds[])); try!(self.print_where_clause(generics)); try!(word(&mut self.s, " ")); try!(self.bopen()); @@ -985,7 +984,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> { try!(self.print_ident(item.ident)); try!(self.cbox(indent_unit)); try!(self.popen()); - try!(self.print_tts(tts.index(&FullRange))); + try!(self.print_tts(&tts[])); 
try!(self.pclose()); try!(word(&mut self.s, ";")); try!(self.end()); @@ -1019,12 +1018,12 @@ pub fn print_enum_def(&mut self, enum_definition: &ast::EnumDef, generics: &ast::Generics, ident: ast::Ident, span: codemap::Span, visibility: ast::Visibility) -> IoResult<()> { - try!(self.head(visibility_qualified(visibility, "enum").index(&FullRange))); + try!(self.head(&visibility_qualified(visibility, "enum")[])); try!(self.print_ident(ident)); try!(self.print_generics(generics)); try!(self.print_where_clause(generics)); try!(space(&mut self.s)); - self.print_variants(enum_definition.variants.index(&FullRange), span) + self.print_variants(&enum_definition.variants[], span) } pub fn print_variants(&mut self, @@ -1034,7 +1033,7 @@ pub fn print_variants(&mut self, for v in variants.iter() { try!(self.space_if_not_bol()); try!(self.maybe_print_comment(v.span.lo)); - try!(self.print_outer_attributes(v.node.attrs.index(&FullRange))); + try!(self.print_outer_attributes(&v.node.attrs[])); try!(self.ibox(indent_unit)); try!(self.print_variant(&**v)); try!(word(&mut self.s, ",")); @@ -1062,7 +1061,7 @@ pub fn print_struct(&mut self, if !struct_def.fields.is_empty() { try!(self.popen()); try!(self.commasep( - Inconsistent, struct_def.fields.index(&FullRange), + Inconsistent, &struct_def.fields[], |s, field| { match field.node.kind { ast::NamedField(..) => panic!("unexpected named field"), @@ -1092,7 +1091,7 @@ pub fn print_struct(&mut self, ast::NamedField(ident, visibility) => { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(field.span.lo)); - try!(self.print_outer_attributes(field.node.attrs.index(&FullRange))); + try!(self.print_outer_attributes(&field.node.attrs[])); try!(self.print_visibility(visibility)); try!(self.print_ident(ident)); try!(self.word_nbsp(":")); @@ -1116,7 +1115,7 @@ pub fn print_struct(&mut self, pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> { match *tt { ast::TtToken(_, ref tk) => { - try!(word(&mut self.s, token_to_string(tk).index(&FullRange))); + try!(word(&mut self.s, &token_to_string(tk)[])); match *tk { parse::token::DocComment(..) 
=> { hardbreak(&mut self.s) @@ -1125,11 +1124,11 @@ pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> { } } ast::TtDelimited(_, ref delimed) => { - try!(word(&mut self.s, token_to_string(&delimed.open_token()).index(&FullRange))); + try!(word(&mut self.s, &token_to_string(&delimed.open_token())[])); try!(space(&mut self.s)); - try!(self.print_tts(delimed.tts.index(&FullRange))); + try!(self.print_tts(&delimed.tts[])); try!(space(&mut self.s)); - word(&mut self.s, token_to_string(&delimed.close_token()).index(&FullRange)) + word(&mut self.s, &token_to_string(&delimed.close_token())[]) }, ast::TtSequence(_, ref seq) => { try!(word(&mut self.s, "$(")); @@ -1139,7 +1138,7 @@ pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> { try!(word(&mut self.s, ")")); match seq.separator { Some(ref tk) => { - try!(word(&mut self.s, token_to_string(tk).index(&FullRange))); + try!(word(&mut self.s, &token_to_string(tk)[])); } None => {}, } @@ -1170,7 +1169,7 @@ pub fn print_variant(&mut self, v: &ast::Variant) -> IoResult<()> { if !args.is_empty() { try!(self.popen()); try!(self.commasep(Consistent, - args.index(&FullRange), + &args[], |s, arg| s.print_type(&*arg.ty))); try!(self.pclose()); } @@ -1194,7 +1193,7 @@ pub fn print_variant(&mut self, v: &ast::Variant) -> IoResult<()> { pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(m.span.lo)); - try!(self.print_outer_attributes(m.attrs.index(&FullRange))); + try!(self.print_outer_attributes(&m.attrs[])); try!(self.print_ty_fn(None, None, m.unsafety, @@ -1226,7 +1225,7 @@ pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> IoResult<()> { pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(meth.span.lo)); - try!(self.print_outer_attributes(meth.attrs.index(&FullRange))); + try!(self.print_outer_attributes(&meth.attrs[])); match meth.node { ast::MethDecl(ident, ref generics, @@ -1244,7 +1243,7 @@ pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> { Some(&explicit_self.node), vis)); try!(word(&mut self.s, " ")); - self.print_block_with_attrs(&**body, meth.attrs.index(&FullRange)) + self.print_block_with_attrs(&**body, &meth.attrs[]) }, ast::MethMac(codemap::Spanned { node: ast::MacInvocTT(ref pth, ref tts, _), ..}) => { @@ -1253,7 +1252,7 @@ pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> { try!(word(&mut self.s, "! 
")); try!(self.cbox(indent_unit)); try!(self.popen()); - try!(self.print_tts(tts.index(&FullRange))); + try!(self.print_tts(&tts[])); try!(self.pclose()); try!(word(&mut self.s, ";")); self.end() @@ -1520,7 +1519,7 @@ pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> { ast::ExprVec(ref exprs) => { try!(self.ibox(indent_unit)); try!(word(&mut self.s, "[")); - try!(self.commasep_exprs(Inconsistent, exprs.index(&FullRange))); + try!(self.commasep_exprs(Inconsistent, &exprs[])); try!(word(&mut self.s, "]")); try!(self.end()); } @@ -1541,7 +1540,7 @@ pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> { try!(word(&mut self.s, "{")); try!(self.commasep_cmnt( Consistent, - fields.index(&FullRange), + &fields[], |s, field| { try!(s.ibox(indent_unit)); try!(s.print_ident(field.ident.node)); @@ -1568,7 +1567,7 @@ pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> { } ast::ExprTup(ref exprs) => { try!(self.popen()); - try!(self.commasep_exprs(Inconsistent, exprs.index(&FullRange))); + try!(self.commasep_exprs(Inconsistent, &exprs[])); if exprs.len() == 1 { try!(word(&mut self.s, ",")); } @@ -1576,7 +1575,7 @@ pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> { } ast::ExprCall(ref func, ref args) => { try!(self.print_expr_maybe_paren(&**func)); - try!(self.print_call_post(args.index(&FullRange))); + try!(self.print_call_post(&args[])); } ast::ExprMethodCall(ident, ref tys, ref args) => { let base_args = args.slice_from(1); @@ -1585,7 +1584,7 @@ pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> { try!(self.print_ident(ident.node)); if tys.len() > 0u { try!(word(&mut self.s, "::<")); - try!(self.commasep(Inconsistent, tys.index(&FullRange), + try!(self.commasep(Inconsistent, &tys[], |s, ty| s.print_type(&**ty))); try!(word(&mut self.s, ">")); } @@ -1782,11 +1781,11 @@ pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> { try!(self.print_string(a.asm.get(), a.asm_str_style)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, a.outputs.index(&FullRange), + try!(self.commasep(Inconsistent, &a.outputs[], |s, &(ref co, ref o, is_rw)| { match co.get().slice_shift_char() { Some(('=', operand)) if is_rw => { - try!(s.print_string(format!("+{}", operand).index(&FullRange), + try!(s.print_string(&format!("+{}", operand)[], ast::CookedStr)) } _ => try!(s.print_string(co.get(), ast::CookedStr)) @@ -1799,7 +1798,7 @@ pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> { try!(space(&mut self.s)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, a.inputs.index(&FullRange), + try!(self.commasep(Inconsistent, &a.inputs[], |s, &(ref co, ref o)| { try!(s.print_string(co.get(), ast::CookedStr)); try!(s.popen()); @@ -1810,7 +1809,7 @@ pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> { try!(space(&mut self.s)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, a.clobbers.index(&FullRange), + try!(self.commasep(Inconsistent, &a.clobbers[], |s, co| { try!(s.print_string(co.get(), ast::CookedStr)); Ok(()) @@ -1884,7 +1883,7 @@ pub fn print_decl(&mut self, decl: &ast::Decl) -> IoResult<()> { pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> { if self.encode_idents_with_hygiene { let encoded = ident.encode_with_hygiene(); - try!(word(&mut self.s, encoded.index(&FullRange))) + try!(word(&mut self.s, &encoded[])) } else { try!(word(&mut self.s, token::get_ident(ident).get())) } @@ -1892,7 +1891,7 @@ pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> { } pub fn 
print_uint(&mut self, i: uint) -> IoResult<()> { - word(&mut self.s, i.to_string().index(&FullRange)) + word(&mut self.s, &i.to_string()[]) } pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> { @@ -1966,7 +1965,7 @@ fn print_path_parameters(&mut self, } try!(self.commasep( Inconsistent, - data.types.index(&FullRange), + &data.types[], |s, ty| s.print_type(&**ty))); comma = true; } @@ -1989,7 +1988,7 @@ fn print_path_parameters(&mut self, try!(word(&mut self.s, "(")); try!(self.commasep( Inconsistent, - data.inputs.index(&FullRange), + &data.inputs[], |s, ty| s.print_type(&**ty))); try!(word(&mut self.s, ")")); @@ -2042,7 +2041,7 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> IoResult<()> { Some(ref args) => { if !args.is_empty() { try!(self.popen()); - try!(self.commasep(Inconsistent, args.index(&FullRange), + try!(self.commasep(Inconsistent, &args[], |s, p| s.print_pat(&**p))); try!(self.pclose()); } @@ -2054,7 +2053,7 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> IoResult<()> { try!(self.nbsp()); try!(self.word_space("{")); try!(self.commasep_cmnt( - Consistent, fields.index(&FullRange), + Consistent, &fields[], |s, f| { try!(s.cbox(indent_unit)); if !f.node.is_shorthand { @@ -2075,7 +2074,7 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> IoResult<()> { ast::PatTup(ref elts) => { try!(self.popen()); try!(self.commasep(Inconsistent, - elts.index(&FullRange), + &elts[], |s, p| s.print_pat(&**p))); if elts.len() == 1 { try!(word(&mut self.s, ",")); @@ -2103,7 +2102,7 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> IoResult<()> { ast::PatVec(ref before, ref slice, ref after) => { try!(word(&mut self.s, "[")); try!(self.commasep(Inconsistent, - before.index(&FullRange), + &before[], |s, p| s.print_pat(&**p))); for p in slice.iter() { if !before.is_empty() { try!(self.word_space(",")); } @@ -2117,7 +2116,7 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> IoResult<()> { if !after.is_empty() { try!(self.word_space(",")); } } try!(self.commasep(Inconsistent, - after.index(&FullRange), + &after[], |s, p| s.print_pat(&**p))); try!(word(&mut self.s, "]")); } @@ -2134,7 +2133,7 @@ fn print_arm(&mut self, arm: &ast::Arm) -> IoResult<()> { } try!(self.cbox(indent_unit)); try!(self.ibox(0u)); - try!(self.print_outer_attributes(arm.attrs.index(&FullRange))); + try!(self.print_outer_attributes(&arm.attrs[])); let mut first = true; for p in arm.pats.iter() { if first { @@ -2234,7 +2233,7 @@ pub fn print_fn_args(&mut self, decl: &ast::FnDecl, // HACK(eddyb) ignore the separately printed self argument. 
let args = if first { - decl.inputs.index(&FullRange) + &decl.inputs[] } else { decl.inputs.slice_from(1) }; @@ -2400,7 +2399,7 @@ pub fn print_generics(&mut self, ints.push(i); } - try!(self.commasep(Inconsistent, ints.index(&FullRange), |s, &idx| { + try!(self.commasep(Inconsistent, &ints[], |s, &idx| { if idx < generics.lifetimes.len() { let lifetime = &generics.lifetimes[idx]; s.print_lifetime_def(lifetime) @@ -2417,7 +2416,7 @@ pub fn print_generics(&mut self, pub fn print_ty_param(&mut self, param: &ast::TyParam) -> IoResult<()> { try!(self.print_ident(param.ident)); - try!(self.print_bounds(":", param.bounds.index(&FullRange))); + try!(self.print_bounds(":", &param.bounds[])); match param.default { Some(ref default) => { try!(space(&mut self.s)); @@ -2493,7 +2492,7 @@ pub fn print_meta_item(&mut self, item: &ast::MetaItem) -> IoResult<()> { try!(word(&mut self.s, name.get())); try!(self.popen()); try!(self.commasep(Consistent, - items.index(&FullRange), + &items[], |s, i| s.print_meta_item(&**i))); try!(self.pclose()); } @@ -2529,7 +2528,7 @@ pub fn print_view_path(&mut self, vp: &ast::ViewPath) -> IoResult<()> { try!(self.print_path(path, false)); try!(word(&mut self.s, "::{")); } - try!(self.commasep(Inconsistent, idents.index(&FullRange), |s, w| { + try!(self.commasep(Inconsistent, &idents[], |s, w| { match w.node { ast::PathListIdent { name, .. } => { s.print_ident(name) } @@ -2547,7 +2546,7 @@ pub fn print_view_path(&mut self, vp: &ast::ViewPath) -> IoResult<()> { pub fn print_view_item(&mut self, item: &ast::ViewItem) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(item.attrs.index(&FullRange))); + try!(self.print_outer_attributes(&item.attrs[])); try!(self.print_visibility(item.vis)); match item.node { ast::ViewItemExternCrate(id, ref optional_path, _) => { @@ -2689,7 +2688,7 @@ pub fn print_ty_fn(&mut self, try!(self.pclose()); } - try!(self.print_bounds(":", bounds.index(&FullRange))); + try!(self.print_bounds(":", &bounds[])); try!(self.print_fn_output(decl)); @@ -2748,7 +2747,7 @@ pub fn print_literal(&mut self, lit: &ast::Lit) -> IoResult<()> { try!(self.maybe_print_comment(lit.span.lo)); match self.next_lit(lit.span.lo) { Some(ref ltrl) => { - return word(&mut self.s, (*ltrl).lit.index(&FullRange)); + return word(&mut self.s, &(*ltrl).lit[]); } _ => () } @@ -2758,7 +2757,7 @@ pub fn print_literal(&mut self, lit: &ast::Lit) -> IoResult<()> { let mut res = String::from_str("b'"); ascii::escape_default(byte, |c| res.push(c as char)); res.push('\''); - word(&mut self.s, res.index(&FullRange)) + word(&mut self.s, &res[]) } ast::LitChar(ch) => { let mut res = String::from_str("'"); @@ -2766,36 +2765,36 @@ pub fn print_literal(&mut self, lit: &ast::Lit) -> IoResult<()> { res.push(c); } res.push('\''); - word(&mut self.s, res.index(&FullRange)) + word(&mut self.s, &res[]) } ast::LitInt(i, t) => { match t { ast::SignedIntLit(st, ast::Plus) => { word(&mut self.s, - ast_util::int_ty_to_string(st, Some(i as i64)).index(&FullRange)) + &ast_util::int_ty_to_string(st, Some(i as i64))[]) } ast::SignedIntLit(st, ast::Minus) => { let istr = ast_util::int_ty_to_string(st, Some(-(i as i64))); word(&mut self.s, - format!("-{}", istr).index(&FullRange)) + &format!("-{}", istr)[]) } ast::UnsignedIntLit(ut) => { word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i)).as_slice()) } ast::UnsuffixedIntLit(ast::Plus) => { - word(&mut self.s, format!("{}", i).index(&FullRange)) + word(&mut self.s, &format!("{}", 
i)[]) } ast::UnsuffixedIntLit(ast::Minus) => { - word(&mut self.s, format!("-{}", i).index(&FullRange)) + word(&mut self.s, &format!("-{}", i)[]) } } } ast::LitFloat(ref f, t) => { word(&mut self.s, - format!( + &format!( "{}{}", f.get(), - ast_util::float_ty_to_string(t).index(&FullRange)).index(&FullRange)) + &ast_util::float_ty_to_string(t)[])[]) } ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()), ast::LitBool(val) => { @@ -2807,7 +2806,7 @@ pub fn print_literal(&mut self, lit: &ast::Lit) -> IoResult<()> { ascii::escape_default(ch as u8, |ch| escaped.push(ch as char)); } - word(&mut self.s, format!("b\"{}\"", escaped).index(&FullRange)) + word(&mut self.s, &format!("b\"{}\"", escaped)[]) } } } @@ -2848,7 +2847,7 @@ pub fn print_comment(&mut self, comments::Mixed => { assert_eq!(cmnt.lines.len(), 1u); try!(zerobreak(&mut self.s)); - try!(word(&mut self.s, cmnt.lines[0].index(&FullRange))); + try!(word(&mut self.s, &cmnt.lines[0][])); zerobreak(&mut self.s) } comments::Isolated => { @@ -2857,7 +2856,7 @@ pub fn print_comment(&mut self, // Don't print empty lines because they will end up as trailing // whitespace if !line.is_empty() { - try!(word(&mut self.s, line.index(&FullRange))); + try!(word(&mut self.s, &line[])); } try!(hardbreak(&mut self.s)); } @@ -2866,13 +2865,13 @@ pub fn print_comment(&mut self, comments::Trailing => { try!(word(&mut self.s, " ")); if cmnt.lines.len() == 1u { - try!(word(&mut self.s, cmnt.lines[0].index(&FullRange))); + try!(word(&mut self.s, &cmnt.lines[0][])); hardbreak(&mut self.s) } else { try!(self.ibox(0u)); for line in cmnt.lines.iter() { if !line.is_empty() { - try!(word(&mut self.s, line.index(&FullRange))); + try!(word(&mut self.s, &line[])); } try!(hardbreak(&mut self.s)); } @@ -2905,7 +2904,7 @@ pub fn print_string(&mut self, st: &str, string=st)) } }; - word(&mut self.s, st.index(&FullRange)) + word(&mut self.s, &st[]) } pub fn next_comment(&mut self) -> Option { @@ -2936,7 +2935,7 @@ pub fn print_opt_abi_and_extern_if_nondefault(&mut self, Some(abi::Rust) => Ok(()), Some(abi) => { try!(self.word_nbsp("extern")); - self.word_nbsp(abi.to_string().index(&FullRange)) + self.word_nbsp(&abi.to_string()[]) } None => Ok(()) } @@ -2947,7 +2946,7 @@ pub fn print_extern_opt_abi(&mut self, match opt_abi { Some(abi) => { try!(self.word_nbsp("extern")); - self.word_nbsp(abi.to_string().index(&FullRange)) + self.word_nbsp(&abi.to_string()[]) } None => Ok(()) } @@ -2963,7 +2962,7 @@ pub fn print_fn_header_info(&mut self, if abi != abi::Rust { try!(self.word_nbsp("extern")); - try!(self.word_nbsp(abi.to_string().index(&FullRange))); + try!(self.word_nbsp(&abi.to_string()[])); } word(&mut self.s, "fn") diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index daa51203287..77d10482f1e 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -40,7 +40,7 @@ pub fn maybe_inject_prelude(krate: ast::Crate) -> ast::Crate { } fn use_std(krate: &ast::Crate) -> bool { - !attr::contains_name(krate.attrs.index(&FullRange), "no_std") + !attr::contains_name(&krate.attrs[], "no_std") } fn no_prelude(attrs: &[ast::Attribute]) -> bool { @@ -56,7 +56,7 @@ fn fold_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { // The name to use in `extern crate "name" as std;` let actual_crate_name = match self.alt_std_name { - Some(ref s) => token::intern_and_get_ident(s.index(&FullRange)), + Some(ref s) => token::intern_and_get_ident(&s[]), None => token::intern_and_get_ident("std"), }; @@ -104,7 +104,7 @@ fn fold_crate(&mut self, mut 
krate: ast::Crate) -> ast::Crate { attr::mark_used(&no_std_attr); krate.attrs.push(no_std_attr); - if !no_prelude(krate.attrs.index(&FullRange)) { + if !no_prelude(&krate.attrs[]) { // only add `use std::prelude::*;` if there wasn't a // `#![no_implicit_prelude]` at the crate level. // fold_mod() will insert glob path. @@ -124,7 +124,7 @@ fn fold_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { } fn fold_item(&mut self, item: P) -> SmallVector> { - if !no_prelude(item.attrs.index(&FullRange)) { + if !no_prelude(&item.attrs[]) { // only recur if there wasn't `#![no_implicit_prelude]` // on this item, i.e. this means that the prelude is not // implicitly imported though the whole subtree diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 711715355e9..bacfa0bbfce 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -73,14 +73,14 @@ pub fn modify_for_testing(sess: &ParseSess, // We generate the test harness when building in the 'test' // configuration, either with the '--test' or '--cfg test' // command line options. - let should_test = attr::contains_name(krate.config.index(&FullRange), "test"); + let should_test = attr::contains_name(&krate.config[], "test"); // Check for #[reexport_test_harness_main = "some_name"] which // creates a `use some_name = __test::main;`. This needs to be // unconditional, so that the attribute is still marked as used in // non-test builds. let reexport_test_harness_main = - attr::first_attr_value_str_by_name(krate.attrs.index(&FullRange), + attr::first_attr_value_str_by_name(&krate.attrs[], "reexport_test_harness_main"); if should_test { @@ -119,7 +119,7 @@ fn fold_item(&mut self, i: P) -> SmallVector> { self.cx.path.push(ident); } debug!("current path: {}", - ast_util::path_name_i(self.cx.path.index(&FullRange))); + ast_util::path_name_i(&self.cx.path[])); if is_test_fn(&self.cx, &*i) || is_bench_fn(&self.cx, &*i) { match i.node { @@ -277,8 +277,8 @@ fn strip_test_functions(krate: ast::Crate) -> ast::Crate { // When not compiling with --test we should not compile the // #[test] functions config::strip_items(krate, |attrs| { - !attr::contains_name(attrs.index(&FullRange), "test") && - !attr::contains_name(attrs.index(&FullRange), "bench") + !attr::contains_name(&attrs[], "test") && + !attr::contains_name(&attrs[], "bench") }) } @@ -291,7 +291,7 @@ enum HasTestSignature { fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool { - let has_test_attr = attr::contains_name(i.attrs.index(&FullRange), "test"); + let has_test_attr = attr::contains_name(&i.attrs[], "test"); fn has_test_signature(i: &ast::Item) -> HasTestSignature { match &i.node { @@ -329,7 +329,7 @@ fn has_test_signature(i: &ast::Item) -> HasTestSignature { } fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool { - let has_bench_attr = attr::contains_name(i.attrs.index(&FullRange), "bench"); + let has_bench_attr = attr::contains_name(&i.attrs[], "bench"); fn has_test_signature(i: &ast::Item) -> bool { match i.node { @@ -384,7 +384,7 @@ fn should_fail(i: &ast::Item) -> ShouldFail { mod __test { extern crate test (name = "test", vers = "..."); fn main() { - test::test_main_static(::os::args().index(&FullRange), tests) + test::test_main_static(&::os::args()[], tests) } static tests : &'static [test::TestDescAndFn] = &[ @@ -510,8 +510,8 @@ fn mk_tests(cx: &TestCtxt) -> P { } fn is_test_crate(krate: &ast::Crate) -> bool { - match attr::find_crate_name(krate.attrs.index(&FullRange)) { - Some(ref s) if "test" == s.get().index(&FullRange) => true, + match 
attr::find_crate_name(&krate.attrs[]) { + Some(ref s) if "test" == &s.get()[] => true, _ => false } } @@ -551,11 +551,11 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P { // creates $name: $expr let field = |&: name, expr| ecx.field_imm(span, ecx.ident_of(name), expr); - debug!("encoding {}", ast_util::path_name_i(path.index(&FullRange))); + debug!("encoding {}", ast_util::path_name_i(&path[])); // path to the #[test] function: "foo::bar::baz" - let path_string = ast_util::path_name_i(path.index(&FullRange)); - let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string.index(&FullRange))); + let path_string = ast_util::path_name_i(&path[]); + let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[])); // self::test::StaticTestName($name_expr) let name_expr = ecx.expr_call(span, diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 93de342d487..1efbcf073c6 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -28,7 +28,7 @@ pub struct Interner { vect: RefCell >, } -// when traits can extend traits, we should extend index to get .index(&FullRange) +// when traits can extend traits, we should extend index to get [] impl Interner { pub fn new() -> Interner { Interner { @@ -109,27 +109,27 @@ impl Eq for RcStr {} impl Ord for RcStr { fn cmp(&self, other: &RcStr) -> Ordering { - self.index(&FullRange).cmp(other.index(&FullRange)) + self[].cmp(&other[]) } } impl fmt::Show for RcStr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use std::fmt::Show; - self.index(&FullRange).fmt(f) + self[].fmt(f) } } impl BorrowFrom for str { fn borrow_from(owned: &RcStr) -> &str { - owned.string.index(&FullRange) + &owned.string[] } } impl Deref for RcStr { type Target = str; - fn deref(&self) -> &str { self.string.index(&FullRange) } + fn deref(&self) -> &str { &self.string[] } } /// A StrInterner differs from Interner in that it accepts @@ -139,7 +139,7 @@ pub struct StrInterner { vect: RefCell >, } -/// When traits can extend traits, we should extend index to get .index(&FullRange) +/// When traits can extend traits, we should extend index to get [] impl StrInterner { pub fn new() -> StrInterner { StrInterner { diff --git a/src/libterm/terminfo/mod.rs b/src/libterm/terminfo/mod.rs index f2dcdc6160a..4933938f338 100644 --- a/src/libterm/terminfo/mod.rs +++ b/src/libterm/terminfo/mod.rs @@ -180,7 +180,7 @@ pub fn new(out: T) -> Option+Send+'static>> { } }; - let entry = open(term.index(&FullRange)); + let entry = open(&term[]); if entry.is_err() { if os::getenv("MSYSCON").map_or(false, |s| { "mintty.exe" == s diff --git a/src/libterm/terminfo/parser/compiled.rs b/src/libterm/terminfo/parser/compiled.rs index 7a06849abd1..4735b6e8f2a 100644 --- a/src/libterm/terminfo/parser/compiled.rs +++ b/src/libterm/terminfo/parser/compiled.rs @@ -284,13 +284,13 @@ macro_rules! try { ($e:expr) => ( // Find the offset of the NUL we want to go to - let nulpos = string_table.index(&((offset as uint) .. (string_table_bytes as uint))) + let nulpos = string_table[(offset as uint) .. (string_table_bytes as uint)] .iter().position(|&b| b == 0); match nulpos { Some(len) => { string_map.insert(name.to_string(), - string_table.index(&((offset as uint) .. - (offset as uint + len))).to_vec()) + string_table[(offset as uint) .. 
+ (offset as uint + len)].to_vec()) }, None => { return Err("invalid file: missing NUL in \ diff --git a/src/libterm/terminfo/searcher.rs b/src/libterm/terminfo/searcher.rs index 2651be1ebb8..1fca3c62f78 100644 --- a/src/libterm/terminfo/searcher.rs +++ b/src/libterm/terminfo/searcher.rs @@ -61,13 +61,13 @@ pub fn get_dbpath_for_term(term: &str) -> Option> { for p in dirs_to_search.iter() { if p.exists() { let f = first_char.to_string(); - let newp = p.join_many(&[f.index(&FullRange), term]); + let newp = p.join_many(&[&f[], term]); if newp.exists() { return Some(box newp); } // on some installations the dir is named after the hex of the char (e.g. OS X) let f = format!("{:x}", first_char as uint); - let newp = p.join_many(&[f.index(&FullRange), term]); + let newp = p.join_many(&[&f[], term]); if newp.exists() { return Some(box newp); } diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index 68d06cc4dab..3d3e499bb23 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -948,7 +948,7 @@ fn should_sort_failures_before_printing_them() { st.write_failures().unwrap(); let s = match st.out { - Raw(ref m) => String::from_utf8_lossy(m.index(&FullRange)), + Raw(ref m) => String::from_utf8_lossy(&m[]), Pretty(_) => unreachable!() }; diff --git a/src/test/run-pass/slice.rs b/src/test/run-pass/slice.rs index fca7daeb07d..6a382c076d7 100644 --- a/src/test/run-pass/slice.rs +++ b/src/test/run-pass/slice.rs @@ -14,7 +14,7 @@ #![feature(associated_types)] extern crate core; -use core::ops::{Index, Range, RangeTo, RangeFrom, FullRange}; +use core::ops::{Index, IndexMut, Range, RangeTo, RangeFrom, FullRange}; static mut COUNT: uint = 0;
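The run-pass test touched above exercises user-defined types that overload these range forms, which is what the bracket sugar expands to. A minimal sketch of that shape, assuming a hypothetical `Chars` wrapper (not taken from the test) and written in later Rust, where `FullRange` is spelled `RangeFull`:

    use std::ops::{Index, Range, RangeFrom, RangeFull};

    // Illustrative only: `&x[a..b]` calls the `Index<Range<_>>` impl, `&x[a..]`
    // calls `Index<RangeFrom<_>>`, and the bare `&x[]` in this patch is the
    // full-range case (today's `&x[..]`, i.e. `RangeFull`).
    struct Chars(Vec<char>);

    impl Index<Range<usize>> for Chars {
        type Output = [char];
        fn index(&self, r: Range<usize>) -> &[char] { &self.0[r] }
    }

    impl Index<RangeFrom<usize>> for Chars {
        type Output = [char];
        fn index(&self, r: RangeFrom<usize>) -> &[char] { &self.0[r] }
    }

    impl Index<RangeFull> for Chars {
        type Output = [char];
        fn index(&self, _: RangeFull) -> &[char] { &self.0 }
    }

    fn main() {
        let cs = Chars(vec!['s', 'u', 'g', 'a', 'r']);
        assert_eq!(&cs[1..3], ['u', 'g']);
        assert_eq!(&cs[3..], ['a', 'r']);
        assert_eq!(cs[..].len(), 5);
    }

The test's import list also gains `IndexMut`, presumably so the same custom types can overload the mutable forms (`&mut x[a..b]`) once the sugar is used there too.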