}
if done { return; }
- let missing_patterns =
- props.error_patterns.index(&(next_err_idx..));
+ let missing_patterns = &props.error_patterns[next_err_idx..];
if missing_patterns.len() == 1u {
fatal_proc_rec(format!("error pattern '{}' not found!",
missing_patterns[0]).as_slice(),
if extra_bytes > 0 {
let mut last_word = 0u32;
- for (i, &byte) in bytes.index(&((complete_words*4)..)).iter().enumerate() {
+ for (i, &byte) in bytes[(complete_words*4)..].iter().enumerate() {
last_word |= (reverse_bits(byte) as u32) << (i * 8);
}
bitv.storage.push(last_word);
let buf = self.buffer_as_slice();
if contiguous {
let (empty, buf) = buf.split_at(0);
- (buf.index(&(self.tail..self.head)), empty)
+ (&buf[self.tail..self.head], empty)
} else {
let (mid, right) = buf.split_at(self.tail);
let (left, _) = mid.split_at(self.head);
//! #![feature(slicing_syntax)]
//! fn main() {
//! let numbers = [0i, 1i, 2i];
-//! let last_numbers = numbers.index(&(1..3));
+//! let last_numbers = &numbers[1..3];
//! // last_numbers is now &[1i, 2i]
//! }
//! ```
use core::marker::Sized;
use core::mem::size_of;
use core::mem;
-use core::ops::{FnMut, FullRange, Index, IndexMut};
+use core::ops::{FnMut, FullRange};
use core::option::Option::{self, Some, None};
use core::ptr::PtrExt;
use core::ptr;
#[unstable = "trait is unstable"]
impl<T> BorrowFrom<Vec<T>> for [T] {
- fn borrow_from(owned: &Vec<T>) -> &[T] { owned.index(&FullRange) }
+ fn borrow_from(owned: &Vec<T>) -> &[T] { &owned[] }
}
#[unstable = "trait is unstable"]
impl<T> BorrowFromMut<Vec<T>> for [T] {
- fn borrow_from_mut(owned: &mut Vec<T>) -> &mut [T] { owned.index_mut(&FullRange) }
+ fn borrow_from_mut(owned: &mut Vec<T>) -> &mut [T] { &mut owned[] }
}
#[unstable = "trait is unstable"]
use core::prelude::{Ord, FullRange};
use core::default::Default;
use core::mem;
- use core::ops::Index;
use std::iter::RandomAccessIterator;
use std::rand::{Rng, thread_rng};
use std::rc::Rc;
// Test on stack.
let vec_stack: &[_] = &[1i, 2, 3];
- let v_b = vec_stack.index(&(1u..3u)).to_vec();
+ let v_b = vec_stack[1u..3u].to_vec();
assert_eq!(v_b.len(), 2u);
let v_b = v_b.as_slice();
assert_eq!(v_b[0], 2);
// Test `Box<[T]>`
let vec_unique = vec![1i, 2, 3, 4, 5, 6];
- let v_d = vec_unique.index(&(1u..6u)).to_vec();
+ let v_d = vec_unique[1u..6u].to_vec();
assert_eq!(v_d.len(), 5u);
let v_d = v_d.as_slice();
assert_eq!(v_d[0], 2);
#[test]
fn test_slice_from() {
let vec: &[int] = &[1, 2, 3, 4];
- assert_eq!(vec.index(&(0..)), vec);
+ assert_eq!(&vec[0..], vec);
let b: &[int] = &[3, 4];
- assert_eq!(vec.index(&(2..)), b);
+ assert_eq!(&vec[2..], b);
let b: &[int] = &[];
- assert_eq!(vec.index(&(4..)), b);
+ assert_eq!(&vec[4..], b);
}
#[test]
fn test_slice_to() {
let vec: &[int] = &[1, 2, 3, 4];
- assert_eq!(vec.index(&(0..4)), vec);
+ assert_eq!(&vec[0..4], vec);
let b: &[int] = &[1, 2];
- assert_eq!(vec.index(&(0..2)), b);
+ assert_eq!(&vec[0..2], b);
let b: &[int] = &[];
- assert_eq!(vec.index(&(0..0)), b);
+ assert_eq!(&vec[0..0], b);
}
}
assert_eq!(cnt, 3);
- for f in v.index(&(1..3)).iter() {
+ for f in v[1..3].iter() {
assert!(*f == Foo);
cnt += 1;
}
#[unstable = "trait is unstable"]
impl BorrowFrom<String> for str {
- fn borrow_from(owned: &String) -> &str { owned.index(&FullRange) }
+ fn borrow_from(owned: &String) -> &str { &owned[] }
}
#[unstable = "trait is unstable"]
#[unstable = "this functionality may be moved to libunicode"]
fn nfd_chars<'a>(&'a self) -> Decompositions<'a> {
Decompositions {
- iter: self.index(&FullRange).chars(),
+ iter: self[].chars(),
buffer: Vec::new(),
sorted: false,
kind: Canonical
#[unstable = "this functionality may be moved to libunicode"]
fn nfkd_chars<'a>(&'a self) -> Decompositions<'a> {
Decompositions {
- iter: self.index(&FullRange).chars(),
+ iter: self[].chars(),
buffer: Vec::new(),
sorted: false,
kind: Compatible
/// ```
#[stable]
fn contains(&self, pat: &str) -> bool {
- core_str::StrExt::contains(self.index(&FullRange), pat)
+ core_str::StrExt::contains(&self[], pat)
}
/// Returns true if a string contains a char pattern.
/// ```
#[unstable = "might get removed in favour of a more generic contains()"]
fn contains_char<P: CharEq>(&self, pat: P) -> bool {
- core_str::StrExt::contains_char(self.index(&FullRange), pat)
+ core_str::StrExt::contains_char(&self[], pat)
}
/// An iterator over the characters of `self`. Note, this iterates
/// ```
#[stable]
fn chars(&self) -> Chars {
- core_str::StrExt::chars(self.index(&FullRange))
+ core_str::StrExt::chars(&self[])
}
/// An iterator over the bytes of `self`
/// ```
#[stable]
fn bytes(&self) -> Bytes {
- core_str::StrExt::bytes(self.index(&FullRange))
+ core_str::StrExt::bytes(&self[])
}
/// An iterator over the characters of `self` and their byte offsets.
#[stable]
fn char_indices(&self) -> CharIndices {
- core_str::StrExt::char_indices(self.index(&FullRange))
+ core_str::StrExt::char_indices(&self[])
}
/// An iterator over substrings of `self`, separated by characters
/// ```
#[stable]
fn split<P: CharEq>(&self, pat: P) -> Split<P> {
- core_str::StrExt::split(self.index(&FullRange), pat)
+ core_str::StrExt::split(&self[], pat)
}
/// An iterator over substrings of `self`, separated by characters
/// ```
#[stable]
fn splitn<P: CharEq>(&self, count: uint, pat: P) -> SplitN<P> {
- core_str::StrExt::splitn(self.index(&FullRange), count, pat)
+ core_str::StrExt::splitn(&self[], count, pat)
}
/// An iterator over substrings of `self`, separated by characters
/// ```
#[unstable = "might get removed"]
fn split_terminator<P: CharEq>(&self, pat: P) -> SplitTerminator<P> {
- core_str::StrExt::split_terminator(self.index(&FullRange), pat)
+ core_str::StrExt::split_terminator(&self[], pat)
}
/// An iterator over substrings of `self`, separated by characters
/// ```
#[stable]
fn rsplitn<P: CharEq>(&self, count: uint, pat: P) -> RSplitN<P> {
- core_str::StrExt::rsplitn(self.index(&FullRange), count, pat)
+ core_str::StrExt::rsplitn(&self[], count, pat)
}
/// An iterator over the start and end indices of the disjoint
/// ```
#[unstable = "might have its iterator type changed"]
fn match_indices<'a>(&'a self, pat: &'a str) -> MatchIndices<'a> {
- core_str::StrExt::match_indices(self.index(&FullRange), pat)
+ core_str::StrExt::match_indices(&self[], pat)
}
/// An iterator over the substrings of `self` separated by the pattern `sep`.
/// ```
#[unstable = "might get removed in the future in favor of a more generic split()"]
fn split_str<'a>(&'a self, pat: &'a str) -> SplitStr<'a> {
- core_str::StrExt::split_str(self.index(&FullRange), pat)
+ core_str::StrExt::split_str(&self[], pat)
}
/// An iterator over the lines of a string (subsequences separated
/// ```
#[stable]
fn lines(&self) -> Lines {
- core_str::StrExt::lines(self.index(&FullRange))
+ core_str::StrExt::lines(&self[])
}
/// An iterator over the lines of a string, separated by either
/// ```
#[stable]
fn lines_any(&self) -> LinesAny {
- core_str::StrExt::lines_any(self.index(&FullRange))
+ core_str::StrExt::lines_any(&self[])
}
/// Returns a slice of the given string from the byte range
/// ```
#[unstable = "use slice notation [a..b] instead"]
fn slice(&self, begin: uint, end: uint) -> &str {
- core_str::StrExt::slice(self.index(&FullRange), begin, end)
+ core_str::StrExt::slice(&self[], begin, end)
}
/// Returns a slice of the string from `begin` to its end.
/// See also `slice`, `slice_to` and `slice_chars`.
#[unstable = "use slice notation [a..] instead"]
fn slice_from(&self, begin: uint) -> &str {
- core_str::StrExt::slice_from(self.index(&FullRange), begin)
+ core_str::StrExt::slice_from(&self[], begin)
}
/// Returns a slice of the string from the beginning to byte
/// See also `slice`, `slice_from` and `slice_chars`.
#[unstable = "use slice notation [0..a] instead"]
fn slice_to(&self, end: uint) -> &str {
- core_str::StrExt::slice_to(self.index(&FullRange), end)
+ core_str::StrExt::slice_to(&self[], end)
}
/// Returns a slice of the string from the character range
/// ```
#[unstable = "may have yet to prove its worth"]
fn slice_chars(&self, begin: uint, end: uint) -> &str {
- core_str::StrExt::slice_chars(self.index(&FullRange), begin, end)
+ core_str::StrExt::slice_chars(&self[], begin, end)
}
/// Takes a bytewise (not UTF-8) slice from a string.
/// the entire slice as well.
#[stable]
unsafe fn slice_unchecked(&self, begin: uint, end: uint) -> &str {
- core_str::StrExt::slice_unchecked(self.index(&FullRange), begin, end)
+ core_str::StrExt::slice_unchecked(&self[], begin, end)
}
/// Returns true if the pattern `pat` is a prefix of the string.
/// ```
#[stable]
fn starts_with(&self, pat: &str) -> bool {
- core_str::StrExt::starts_with(self.index(&FullRange), pat)
+ core_str::StrExt::starts_with(&self[], pat)
}
/// Returns true if the pattern `pat` is a suffix of the string.
/// ```
#[stable]
fn ends_with(&self, pat: &str) -> bool {
- core_str::StrExt::ends_with(self.index(&FullRange), pat)
+ core_str::StrExt::ends_with(&self[], pat)
}
/// Returns a string with all pre- and suffixes that match
/// ```
#[stable]
fn trim_matches<P: CharEq>(&self, pat: P) -> &str {
- core_str::StrExt::trim_matches(self.index(&FullRange), pat)
+ core_str::StrExt::trim_matches(&self[], pat)
}
/// Returns a string with all prefixes that match
/// ```
#[stable]
fn trim_left_matches<P: CharEq>(&self, pat: P) -> &str {
- core_str::StrExt::trim_left_matches(self.index(&FullRange), pat)
+ core_str::StrExt::trim_left_matches(&self[], pat)
}
/// Returns a string with all suffixes that match
/// ```
#[stable]
fn trim_right_matches<P: CharEq>(&self, pat: P) -> &str {
- core_str::StrExt::trim_right_matches(self.index(&FullRange), pat)
+ core_str::StrExt::trim_right_matches(&self[], pat)
}
/// Check that `index`-th byte lies at the start and/or end of a
/// ```
#[unstable = "naming is uncertain with container conventions"]
fn is_char_boundary(&self, index: uint) -> bool {
- core_str::StrExt::is_char_boundary(self.index(&FullRange), index)
+ core_str::StrExt::is_char_boundary(&self[], index)
}
/// Pluck a character out of a string and return the index of the next
/// If `i` is not the index of the beginning of a valid UTF-8 character.
#[unstable = "naming is uncertain with container conventions"]
fn char_range_at(&self, start: uint) -> CharRange {
- core_str::StrExt::char_range_at(self.index(&FullRange), start)
+ core_str::StrExt::char_range_at(&self[], start)
}
/// Given a byte position and a str, return the previous char and its position.
/// If `i` is not an index following a valid UTF-8 character.
#[unstable = "naming is uncertain with container conventions"]
fn char_range_at_reverse(&self, start: uint) -> CharRange {
- core_str::StrExt::char_range_at_reverse(self.index(&FullRange), start)
+ core_str::StrExt::char_range_at_reverse(&self[], start)
}
/// Plucks the character starting at the `i`th byte of a string.
/// If `i` is not the index of the beginning of a valid UTF-8 character.
#[unstable = "naming is uncertain with container conventions"]
fn char_at(&self, i: uint) -> char {
- core_str::StrExt::char_at(self.index(&FullRange), i)
+ core_str::StrExt::char_at(&self[], i)
}
/// Plucks the character ending at the `i`th byte of a string.
/// If `i` is not an index following a valid UTF-8 character.
#[unstable = "naming is uncertain with container conventions"]
fn char_at_reverse(&self, i: uint) -> char {
- core_str::StrExt::char_at_reverse(self.index(&FullRange), i)
+ core_str::StrExt::char_at_reverse(&self[], i)
}
/// Work with the byte buffer of a string as a byte slice.
/// ```
#[stable]
fn as_bytes(&self) -> &[u8] {
- core_str::StrExt::as_bytes(self.index(&FullRange))
+ core_str::StrExt::as_bytes(&self[])
}
/// Returns the byte index of the first character of `self` that
/// ```
#[stable]
fn find<P: CharEq>(&self, pat: P) -> Option<uint> {
- core_str::StrExt::find(self.index(&FullRange), pat)
+ core_str::StrExt::find(&self[], pat)
}
/// Returns the byte index of the last character of `self` that
/// ```
#[stable]
fn rfind<P: CharEq>(&self, pat: P) -> Option<uint> {
- core_str::StrExt::rfind(self.index(&FullRange), pat)
+ core_str::StrExt::rfind(&self[], pat)
}
/// Returns the byte index of the first matching substring
/// ```
#[unstable = "might get removed in favor of a more generic find in the future"]
fn find_str(&self, needle: &str) -> Option<uint> {
- core_str::StrExt::find_str(self.index(&FullRange), needle)
+ core_str::StrExt::find_str(&self[], needle)
}
/// Retrieves the first character from a string slice and returns
/// ```
#[unstable = "awaiting conventions about shifting and slices"]
fn slice_shift_char(&self) -> Option<(char, &str)> {
- core_str::StrExt::slice_shift_char(self.index(&FullRange))
+ core_str::StrExt::slice_shift_char(&self[])
}
/// Returns the byte offset of an inner slice relative to an enclosing outer slice.
/// ```
#[unstable = "awaiting convention about comparability of arbitrary slices"]
fn subslice_offset(&self, inner: &str) -> uint {
- core_str::StrExt::subslice_offset(self.index(&FullRange), inner)
+ core_str::StrExt::subslice_offset(&self[], inner)
}
/// Return an unsafe pointer to the strings buffer.
#[stable]
#[inline]
fn as_ptr(&self) -> *const u8 {
- core_str::StrExt::as_ptr(self.index(&FullRange))
+ core_str::StrExt::as_ptr(&self[])
}
/// Return an iterator of `u16` over the string encoded as UTF-16.
#[unstable = "this functionality may only be provided by libunicode"]
fn utf16_units(&self) -> Utf16Units {
- Utf16Units { encoder: Utf16Encoder::new(self.index(&FullRange).chars()) }
+ Utf16Units { encoder: Utf16Encoder::new(self[].chars()) }
}
/// Return the number of bytes in this string
#[stable]
#[inline]
fn len(&self) -> uint {
- core_str::StrExt::len(self.index(&FullRange))
+ core_str::StrExt::len(&self[])
}
/// Returns true if this slice contains no bytes
#[inline]
#[stable]
fn is_empty(&self) -> bool {
- core_str::StrExt::is_empty(self.index(&FullRange))
+ core_str::StrExt::is_empty(&self[])
}
/// Parse this string into the specified type.
#[inline]
#[unstable = "this method was just created"]
fn parse<F: FromStr>(&self) -> Option<F> {
- core_str::StrExt::parse(self.index(&FullRange))
+ core_str::StrExt::parse(&self[])
}
/// Returns an iterator over the
/// ```
#[unstable = "this functionality may only be provided by libunicode"]
fn graphemes(&self, is_extended: bool) -> Graphemes {
- UnicodeStr::graphemes(self.index(&FullRange), is_extended)
+ UnicodeStr::graphemes(&self[], is_extended)
}
/// Returns an iterator over the grapheme clusters of self and their byte offsets.
/// ```
#[unstable = "this functionality may only be provided by libunicode"]
fn grapheme_indices(&self, is_extended: bool) -> GraphemeIndices {
- UnicodeStr::grapheme_indices(self.index(&FullRange), is_extended)
+ UnicodeStr::grapheme_indices(&self[], is_extended)
}
/// An iterator over the words of a string (subsequences separated
/// ```
#[stable]
fn words(&self) -> Words {
- UnicodeStr::words(self.index(&FullRange))
+ UnicodeStr::words(&self[])
}
/// Returns a string's displayed width in columns, treating control
/// `is_cjk` = `false`) if the locale is unknown.
#[unstable = "this functionality may only be provided by libunicode"]
fn width(&self, is_cjk: bool) -> uint {
- UnicodeStr::width(self.index(&FullRange), is_cjk)
+ UnicodeStr::width(&self[], is_cjk)
}
/// Returns a string with leading and trailing whitespace removed.
#[stable]
fn trim(&self) -> &str {
- UnicodeStr::trim(self.index(&FullRange))
+ UnicodeStr::trim(&self[])
}
/// Returns a string with leading whitespace removed.
#[stable]
fn trim_left(&self) -> &str {
- UnicodeStr::trim_left(self.index(&FullRange))
+ UnicodeStr::trim_left(&self[])
}
/// Returns a string with trailing whitespace removed.
#[stable]
fn trim_right(&self) -> &str {
- UnicodeStr::trim_right(self.index(&FullRange))
+ UnicodeStr::trim_right(&self[])
}
}
if i > 0 {
unsafe {
- res.as_mut_vec().push_all(v.index(&(0..i)))
+ res.as_mut_vec().push_all(&v[0..i])
};
}
macro_rules! error { () => ({
unsafe {
if subseqidx != i_ {
- res.as_mut_vec().push_all(v.index(&(subseqidx..i_)));
+ res.as_mut_vec().push_all(&v[subseqidx..i_]);
}
subseqidx = i;
res.as_mut_vec().push_all(REPLACEMENT);
}
if subseqidx < total {
unsafe {
- res.as_mut_vec().push_all(v.index(&(subseqidx..total)))
+ res.as_mut_vec().push_all(&v[subseqidx..total])
};
}
Cow::Owned(res)
type Output = str;
#[inline]
fn index(&self, index: &ops::Range<uint>) -> &str {
- &self.index(&FullRange)[*index]
+ &self[][*index]
}
}
impl ops::Index<ops::RangeTo<uint>> for String {
type Output = str;
#[inline]
fn index(&self, index: &ops::RangeTo<uint>) -> &str {
- &self.index(&FullRange)[*index]
+ &self[][*index]
}
}
impl ops::Index<ops::RangeFrom<uint>> for String {
type Output = str;
#[inline]
fn index(&self, index: &ops::RangeFrom<uint>) -> &str {
- &self.index(&FullRange)[*index]
+ &self[][*index]
}
}
impl ops::Index<ops::FullRange> for String {
type Target = str;
fn deref<'a>(&'a self) -> &'a str {
- unsafe { mem::transmute(self.vec.index(&FullRange)) }
+ unsafe { mem::transmute(&self.vec[]) }
}
}
// self.len <= other.len due to the truncate above, so the
// slice here is always in-bounds.
- let slice = other.index(&(self.len()..));
+ let slice = &other[self.len()..];
self.push_all(slice);
}
}
use cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering};
use fmt;
use marker::Copy;
-use ops::{Deref, FullRange, Index};
+use ops::{Deref, FullRange};
use option::Option;
// macro for implementing n-ary tuple functions and operations
#[unstable = "waiting for Show to stabilize"]
impl<T:fmt::Show> fmt::Show for [T; $N] {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Show::fmt(&self.index(&FullRange), f)
+ fmt::Show::fmt(&&self[], f)
}
}
impl<A, B> PartialEq<[B; $N]> for [A; $N] where A: PartialEq<B> {
#[inline]
fn eq(&self, other: &[B; $N]) -> bool {
- self.index(&FullRange) == other.index(&FullRange)
+ &self[] == &other[]
}
#[inline]
fn ne(&self, other: &[B; $N]) -> bool {
- self.index(&FullRange) != other.index(&FullRange)
+ &self[] != &other[]
}
}
{
#[inline(always)]
fn eq(&self, other: &Rhs) -> bool {
- PartialEq::eq(self.index(&FullRange), &**other)
+ PartialEq::eq(&self[], &**other)
}
#[inline(always)]
fn ne(&self, other: &Rhs) -> bool {
- PartialEq::ne(self.index(&FullRange), &**other)
+ PartialEq::ne(&self[], &**other)
}
}
{
#[inline(always)]
fn eq(&self, other: &[B; $N]) -> bool {
- PartialEq::eq(&**self, other.index(&FullRange))
+ PartialEq::eq(&**self, &other[])
}
#[inline(always)]
fn ne(&self, other: &[B; $N]) -> bool {
- PartialEq::ne(&**self, other.index(&FullRange))
+ PartialEq::ne(&**self, &other[])
}
}
impl<T:PartialOrd> PartialOrd for [T; $N] {
#[inline]
fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
- PartialOrd::partial_cmp(&self.index(&FullRange), &other.index(&FullRange))
+ PartialOrd::partial_cmp(&&self[], &&other[])
}
#[inline]
fn lt(&self, other: &[T; $N]) -> bool {
- PartialOrd::lt(&self.index(&FullRange), &other.index(&FullRange))
+ PartialOrd::lt(&&self[], &&other[])
}
#[inline]
fn le(&self, other: &[T; $N]) -> bool {
- PartialOrd::le(&self.index(&FullRange), &other.index(&FullRange))
+ PartialOrd::le(&&self[], &&other[])
}
#[inline]
fn ge(&self, other: &[T; $N]) -> bool {
- PartialOrd::ge(&self.index(&FullRange), &other.index(&FullRange))
+ PartialOrd::ge(&&self[], &&other[])
}
#[inline]
fn gt(&self, other: &[T; $N]) -> bool {
- PartialOrd::gt(&self.index(&FullRange), &other.index(&FullRange))
+ PartialOrd::gt(&&self[], &&other[])
}
}
impl<T:Ord> Ord for [T; $N] {
#[inline]
fn cmp(&self, other: &[T; $N]) -> Ordering {
- Ord::cmp(&self.index(&FullRange), &other.index(&FullRange))
+ Ord::cmp(&&self[], &&other[])
}
}
)+
use iter::{IteratorExt, range};
use num::{cast, Float, ToPrimitive};
use num::FpCategory as Fp;
-use ops::{FnOnce, Index};
+use ops::FnOnce;
use result::Result::Ok;
use slice::{self, SliceExt};
use str::{self, StrExt};
}
}
- f(unsafe { str::from_utf8_unchecked(buf.index(&(0..end))) })
+ f(unsafe { str::from_utf8_unchecked(&buf[0..end]) })
}
use option::Option;
use option::Option::{Some, None};
use result::Result::Ok;
-use ops::{Deref, FnOnce, Index};
+use ops::{Deref, FnOnce};
use result;
use slice::SliceExt;
use slice;
for c in sign.into_iter() {
let mut b = [0; 4];
let n = c.encode_utf8(&mut b).unwrap_or(0);
- let b = unsafe { str::from_utf8_unchecked(b.index(&(0..n))) };
+ let b = unsafe { str::from_utf8_unchecked(&b[0..n]) };
try!(f.buf.write_str(b));
}
if prefixed { f.buf.write_str(prefix) }
let mut fill = [0u8; 4];
let len = self.fill.encode_utf8(&mut fill).unwrap_or(0);
- let fill = unsafe { str::from_utf8_unchecked(fill.index(&(..len))) };
+ let fill = unsafe { str::from_utf8_unchecked(&fill[..len]) };
for _ in range(0, pre_pad) {
try!(self.buf.write_str(fill));
fn fmt(&self, f: &mut Formatter) -> Result {
let mut utf8 = [0u8; 4];
let amt = self.encode_utf8(&mut utf8).unwrap_or(0);
- let s: &str = unsafe { mem::transmute(utf8.index(&(0..amt))) };
+ let s: &str = unsafe { mem::transmute(&utf8[0..amt]) };
String::fmt(s, f)
}
}
use fmt;
use iter::IteratorExt;
-use ops::Index;
use num::{Int, cast};
use slice::SliceExt;
use str;
if x == zero { break }; // No more digits left to accumulate.
}
}
- let buf = unsafe { str::from_utf8_unchecked(buf.index(&(curr..))) };
+ let buf = unsafe { str::from_utf8_unchecked(&buf[curr..]) };
f.pad_integral(is_positive, self.prefix(), buf)
}
}
use marker::Copy;
use mem::size_of;
use ops::{Add, Sub, Mul, Div, Rem, Neg};
-use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr, Index};
+use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr};
use option::Option;
use option::Option::{Some, None};
use str::{FromStr, StrExt};
};
// Parse the exponent as decimal integer
- let src = src.index(&(offset..));
+ let src = &src[offset..];
let (is_positive, exp) = match src.slice_shift_char() {
Some(('-', src)) => (false, src.parse::<uint>()),
Some(('+', src)) => (true, src.parse::<uint>()),
#[inline]
fn split_at(&self, mid: uint) -> (&[T], &[T]) {
- (self.index(&(0..mid)), self.index(&(mid..)))
+ (&self[0..mid], &self[mid..])
}
#[inline]
}
#[inline]
- fn tail(&self) -> &[T] { self.index(&(1..)) }
+ fn tail(&self) -> &[T] { &self[1..] }
#[inline]
fn init(&self) -> &[T] {
- self.index(&(0..(self.len() - 1)))
+ &self[0..(self.len() - 1)]
}
#[inline]
#[inline]
fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq {
let n = needle.len();
- self.len() >= n && needle == self.index(&(0..n))
+ self.len() >= n && needle == &self[0..n]
}
#[inline]
fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
let (m, n) = (self.len(), needle.len());
- m >= n && needle == self.index(&((m-n)..))
+ m >= n && needle == &self[(m-n)..]
}
#[unstable]
match self.v.iter().position(|x| (self.pred)(x)) {
None => self.finish(),
Some(idx) => {
- let ret = Some(self.v.index(&(0..idx)));
- self.v = self.v.index(&((idx + 1)..));
+ let ret = Some(&self.v[0..idx]);
+ self.v = &self.v[(idx + 1)..];
ret
}
}
match self.v.iter().rposition(|x| (self.pred)(x)) {
None => self.finish(),
Some(idx) => {
- let ret = Some(self.v.index(&((idx + 1)..)));
- self.v = self.v.index(&(0..idx));
+ let ret = Some(&self.v[(idx + 1)..]);
+ self.v = &self.v[0..idx];
ret
}
}
if self.size > self.v.len() {
None
} else {
- let ret = Some(self.v.index(&(0..self.size)));
- self.v = self.v.index(&(1..));
+ let ret = Some(&self.v[0..self.size]);
+ self.v = &self.v[1..];
ret
}
}
let mut hi = lo + self.size;
if hi < lo || hi > self.v.len() { hi = self.v.len(); }
- Some(self.v.index(&(lo..hi)))
+ Some(&self.v[lo..hi])
} else {
None
}
use marker::Sized;
use mem;
use num::Int;
-use ops::{Fn, FnMut, Index};
+use ops::{Fn, FnMut};
use option::Option::{self, None, Some};
use ptr::PtrExt;
use raw::{Repr, Slice};
fn next(&mut self, haystack: &[u8], needle: &[u8]) -> Option<(uint, uint)> {
while self.position + needle.len() <= haystack.len() {
- if haystack.index(&(self.position .. self.position + needle.len())) == needle {
+ if &haystack[self.position .. self.position + needle.len()] == needle {
let match_pos = self.position;
self.position += needle.len(); // add 1 for all matches
return Some((match_pos, match_pos + needle.len()));
//
// What's going on is we have some critical factorization (u, v) of the
// needle, and we want to determine whether u is a suffix of
- // v.index(&(0..period)). If it is, we use "Algorithm CP1". Otherwise we use
+ // &v[0..period]. If it is, we use "Algorithm CP1". Otherwise we use
// "Algorithm CP2", which is optimized for when the period of the needle
// is large.
- if needle.index(&(0..crit_pos)) == needle.index(&(period.. period + crit_pos)) {
+ if &needle[0..crit_pos] == &needle[period.. period + crit_pos] {
TwoWaySearcher {
crit_pos: crit_pos,
period: period,
#[inline]
fn starts_with(&self, needle: &str) -> bool {
let n = needle.len();
- self.len() >= n && needle.as_bytes() == self.as_bytes().index(&(0..n))
+ self.len() >= n && needle.as_bytes() == &self.as_bytes()[0..n]
}
#[inline]
fn ends_with(&self, needle: &str) -> bool {
let (m, n) = (self.len(), needle.len());
- m >= n && needle.as_bytes() == self.as_bytes().index(&((m-n)..))
+ m >= n && needle.as_bytes() == &self.as_bytes()[(m-n)..]
}
#[inline]
fn check(input: char, expect: &[u8]) {
let mut buf = [0u8; 4];
let n = input.encode_utf8(buf.as_mut_slice()).unwrap_or(0);
- assert_eq!(buf.index(&(0..n)), expect);
+ assert_eq!(&buf[0..n], expect);
}
check('x', &[0x78]);
fn check(input: char, expect: &[u16]) {
let mut buf = [0u16; 2];
let n = input.encode_utf16(buf.as_mut_slice()).unwrap_or(0);
- assert_eq!(buf.index(&(0..n)), expect);
+ assert_eq!(&buf[0..n], expect);
}
check('x', &[0x0078]);
}
{
let mut iter = data.iter_mut();
- assert_eq!(iter.index(&FullRange), other_data.index(&FullRange));
+ assert_eq!(&iter[], &other_data[]);
// mutability:
assert!(&mut iter[] == other_data);
iter.next();
- assert_eq!(iter.index(&FullRange), other_data.index(&(1..)));
+ assert_eq!(&iter[], &other_data[1..]);
assert!(&mut iter[] == &mut other_data[1..]);
iter.next_back();
- assert_eq!(iter.index(&FullRange), other_data.index(&(1..2)));
+ assert_eq!(&iter[], &other_data[1..2]);
assert!(&mut iter[] == &mut other_data[1..2]);
let s = iter.into_slice();
self.cur.next();
}
Some((_, other)) => {
- self.err(format!("expected `{:?}`, found `{:?}`", c,
- other).index(&FullRange));
+ self.err(&format!("expected `{:?}`, found `{:?}`", c,
+ other)[]);
}
None => {
- self.err(format!("expected `{:?}` but string was terminated",
- c).index(&FullRange));
+ self.err(&format!("expected `{:?}` but string was terminated",
+ c)[]);
}
}
}
// we may not consume the character, so clone the iterator
match self.cur.clone().next() {
Some((pos, '}')) | Some((pos, '{')) => {
- return self.input.index(&(start..pos));
+ return &self.input[start..pos];
}
Some(..) => { self.cur.next(); }
None => {
self.cur.next();
- return self.input.index(&(start..self.input.len()));
+ return &self.input[start..self.input.len()];
}
}
}
flags: 0,
precision: CountImplied,
width: CountImplied,
- ty: self.input.index(&(0..0)),
+ ty: &self.input[0..0],
};
if !self.consume(':') { return spec }
self.cur.next();
pos
}
- Some(..) | None => { return self.input.index(&(0..0)); }
+ Some(..) | None => { return &self.input[0..0]; }
};
let mut end;
loop {
None => { end = self.input.len(); break }
}
}
- self.input.index(&(start..end))
+ &self.input[start..end]
}
/// Optionally parses an integer at the current position. This doesn't deal
impl Matches {
fn opt_vals(&self, nm: &str) -> Vec<Optval> {
- match find_opt(self.opts.index(&FullRange), Name::from_str(nm)) {
+ match find_opt(&self.opts[], Name::from_str(nm)) {
Some(id) => self.vals[id].clone(),
None => panic!("No option '{}' defined", nm)
}
/// Returns true if any of several options were matched.
pub fn opts_present(&self, names: &[String]) -> bool {
for nm in names.iter() {
- match find_opt(self.opts.as_slice(), Name::from_str(nm.index(&FullRange))) {
+ match find_opt(self.opts.as_slice(), Name::from_str(&nm[])) {
Some(id) if !self.vals[id].is_empty() => return true,
_ => (),
};
/// Returns the string argument supplied to one of several matching options or `None`.
pub fn opts_str(&self, names: &[String]) -> Option<String> {
for nm in names.iter() {
- match self.opt_val(nm.index(&FullRange)) {
+ match self.opt_val(&nm[]) {
Some(Val(ref s)) => return Some(s.clone()),
_ => ()
}
while i < l {
let cur = args[i].clone();
let curlen = cur.len();
- if !is_arg(cur.index(&FullRange)) {
+ if !is_arg(&cur[]) {
free.push(cur);
} else if cur == "--" {
let mut j = i + 1;
let mut names;
let mut i_arg = None;
if cur.as_bytes()[1] == b'-' {
- let tail = cur.index(&(2..curlen));
+ let tail = &cur[2..curlen];
let tail_eq: Vec<&str> = tail.split('=').collect();
if tail_eq.len() <= 1 {
names = vec!(Long(tail.to_string()));
};
if arg_follows && range.next < curlen {
- i_arg = Some(cur.index(&(range.next..curlen)).to_string());
+ i_arg = Some((&cur[range.next..curlen]).to_string());
break;
}
v.push(Val((i_arg.clone())
.unwrap()));
} else if name_pos < names.len() || i + 1 == l ||
- is_arg(args[i + 1].index(&FullRange)) {
+ is_arg(&args[i + 1][]) {
let v = &mut vals[optid];
v.push(Given);
} else {
0 => {}
1 => {
row.push('-');
- row.push_str(short_name.index(&FullRange));
+ row.push_str(&short_name[]);
row.push(' ');
}
_ => panic!("the short name should only be 1 ascii char long"),
0 => {}
_ => {
row.push_str("--");
- row.push_str(long_name.index(&FullRange));
+ row.push_str(&long_name[]);
row.push(' ');
}
}
// arg
match hasarg {
No => {}
- Yes => row.push_str(hint.index(&FullRange)),
+ Yes => row.push_str(&hint[]),
Maybe => {
row.push('[');
- row.push_str(hint.index(&FullRange));
+ row.push_str(&hint[]);
row.push(']');
}
}
row.push(' ');
}
} else {
- row.push_str(desc_sep.index(&FullRange));
+ row.push_str(&desc_sep[]);
}
// Normalize desc to contain words separated by one space character
// FIXME: #5516 should be graphemes not codepoints
let mut desc_rows = Vec::new();
- each_split_within(desc_normalized_whitespace.index(&FullRange), 54, |substr| {
+ each_split_within(&desc_normalized_whitespace[], 54, |substr| {
desc_rows.push(substr.to_string());
true
});
// FIXME: #5516 should be graphemes not codepoints
// wrapped description
- row.push_str(desc_rows.connect(desc_sep.index(&FullRange)).index(&FullRange));
+ row.push_str(&desc_rows.connect(&desc_sep[])[]);
row
});
// Use short_name is possible, but fallback to long_name.
if opt.short_name.len() > 0 {
line.push('-');
- line.push_str(opt.short_name.index(&FullRange));
+ line.push_str(&opt.short_name[]);
} else {
line.push_str("--");
- line.push_str(opt.long_name.index(&FullRange));
+ line.push_str(&opt.long_name[]);
}
if opt.hasarg != No {
if opt.hasarg == Maybe {
line.push('[');
}
- line.push_str(opt.hint.index(&FullRange));
+ line.push_str(&opt.hint[]);
if opt.hasarg == Maybe {
line.push(']');
}
/// Derive a short one-line usage summary from a set of long options.
pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String {
let mut line = format!("Usage: {} ", program_name);
- line.push_str(opts.iter()
- .map(format_option)
- .collect::<Vec<String>>()
- .connect(" ").index(&FullRange));
+ line.push_str(&opts.iter()
+ .map(format_option)
+ .collect::<Vec<String>>()
+ .connect(" ")[]);
line
}
(B, Cr, UnderLim) => { B }
(B, Cr, OverLim) if (i - last_start + 1) > lim
=> panic!("word starting with {} longer than limit!",
- ss.index(&(last_start..(i + 1)))),
+ &ss[last_start..(i + 1)]),
(B, Cr, OverLim) => {
- *cont = it(ss.index(&(slice_start..last_end)));
+ *cont = it(&ss[slice_start..last_end]);
slice_start = last_start;
B
}
}
(B, Ws, OverLim) => {
last_end = i;
- *cont = it(ss.index(&(slice_start..last_end)));
+ *cont = it(&ss[slice_start..last_end]);
A
}
B
}
(C, Cr, OverLim) => {
- *cont = it(ss.index(&(slice_start..last_end)));
+ *cont = it(&ss[slice_start..last_end]);
slice_start = i;
last_start = i;
last_end = i;
B
}
(C, Ws, OverLim) => {
- *cont = it(ss.index(&(slice_start..last_end)));
+ *cont = it(&ss[slice_start..last_end]);
A
}
(C, Ws, UnderLim) => {
pub fn escape(&self) -> String {
match self {
&LabelStr(ref s) => s.escape_default(),
- &EscStr(ref s) => LabelText::escape_str(s.index(&FullRange)),
+ &EscStr(ref s) => LabelText::escape_str(&s[]),
}
}
let mut prefix = self.pre_escaped_content().into_owned();
let suffix = suffix.pre_escaped_content();
prefix.push_str(r"\n\n");
- prefix.push_str(suffix.index(&FullRange));
+ prefix.push_str(&suffix[]);
EscStr(prefix.into_cow())
}
}
impl<'a> Labeller<'a, Node, &'a Edge> for LabelledGraph {
fn graph_id(&'a self) -> Id<'a> {
- Id::new(self.name.index(&FullRange)).unwrap()
+ Id::new(&self.name[]).unwrap()
}
fn node_id(&'a self, n: &Node) -> Id<'a> {
id_name(n)
// Test the literal string from args against the current filter, if there
// is one.
match unsafe { FILTER.as_ref() } {
- Some(filter) if !filter.is_match(args.to_string().index(&FullRange)) => return,
+ Some(filter) if !filter.is_match(&args.to_string()[]) => return,
_ => {}
}
// Search for the longest match, the vector is assumed to be pre-sorted.
for directive in iter.rev() {
match directive.name {
- Some(ref name) if !module.starts_with(name.index(&FullRange)) => {},
+ Some(ref name) if !module.starts_with(&name[]) => {},
Some(..) | None => {
return level <= directive.level
}
/// `Once` primitive (and this function is called from that primitive).
fn init() {
let (mut directives, filter) = match os::getenv("RUST_LOG") {
- Some(spec) => directive::parse_logging_spec(spec.index(&FullRange)),
+ Some(spec) => directive::parse_logging_spec(&spec[]),
None => (Vec::new(), None),
};
/// let mut rng = thread_rng();
/// println!("{:?}", rng.choose(&choices));
/// # // uncomment when slicing syntax is stable
- /// //assert_eq!(rng.choose(choices.index(&(0..0))), None);
+ /// //assert_eq!(rng.choose(&choices[0..0]), None);
/// ```
fn choose<'a, T>(&mut self, values: &'a [T]) -> Option<&'a T> {
if values.is_empty() {
// there (left), and what will be appended on the end (right)
let cap = self.buf.len() - self.pos;
let (left, right) = if cap <= buf.len() {
- (buf.index(&(0..cap)), buf.index(&(cap..)))
+ (&buf[0..cap], &buf[cap..])
} else {
let result: (_, &[_]) = (buf, &[]);
result
}
pub fn as_str_slice<'a>(&'a self) -> &'a str {
- str::from_utf8(self.data.index(&(self.start..self.end))).unwrap()
+ str::from_utf8(&self.data[self.start..self.end]).unwrap()
}
pub fn as_str(&self) -> String {
pub fn with_doc_data<T, F>(d: Doc, f: F) -> T where
F: FnOnce(&[u8]) -> T,
{
- f(d.data.index(&(d.start..d.end)))
+ f(&d.data[d.start..d.end])
}
// This is a bit hacky since we have to skip over the initial
// 'Save' instruction.
let mut pre = String::with_capacity(5);
- for inst in c.insts.index(&(1..)).iter() {
+ for inst in c.insts[1..].iter() {
match *inst {
OneChar(c, FLAG_EMPTY) => pre.push(c),
_ => break
use std::fmt;
use std::iter;
use std::num;
-use std::ops::Index;
/// Static data containing Unicode ranges for general categories and scripts.
use unicode::regex::{UNICODE_CLASSES, PERLD, PERLS, PERLW};
match self.next_char() {
true => Ok(()),
false => {
- self.err(format!("Expected {:?} but got EOF.",
- expected).index(&FullRange))
+ self.err(&format!("Expected {:?} but got EOF.",
+ expected)[])
}
}
}
fn expect(&mut self, expected: char) -> Result<(), Error> {
match self.next_char() {
true if self.cur() == expected => Ok(()),
- true => self.err(format!("Expected '{:?}' but got '{:?}'.",
- expected, self.cur()).index(&FullRange)),
+ true => self.err(&format!("Expected '{:?}' but got '{:?}'.",
+ expected, self.cur())[]),
false => {
- self.err(format!("Expected '{:?}' but got EOF.",
- expected).index(&FullRange))
+ self.err(&format!("Expected '{:?}' but got EOF.",
+ expected)[])
}
}
}
match try!(self.parse_escape()) {
Literal(c3, _) => c2 = c3, // allow literal escapes below
ast =>
- return self.err(format!("Expected a literal, but got {:?}.",
- ast).index(&FullRange)),
+ return self.err(&format!("Expected a literal, but got {:?}.",
+ ast)[]),
}
}
if c2 < c {
- return self.err(format!("Invalid character class \
- range '{}-{}'",
- c,
- c2).index(&FullRange))
+ return self.err(&format!("Invalid character class \
+ range '{}-{}'",
+ c,
+ c2)[])
}
ranges.push((c, self.cur()))
} else {
FLAG_EMPTY
};
let name = self.slice(name_start, closer - 1);
- match find_class(ASCII_CLASSES, name.index(&FullRange)) {
+ match find_class(ASCII_CLASSES, &name[]) {
None => None,
Some(ranges) => {
self.chari = closer;
match self.pos('}') {
Some(i) => i,
None => {
- return self.err(format!("No closing brace for counted \
- repetition starting at position \
- {:?}.",
- start).index(&FullRange))
+ return self.err(&format!("No closing brace for counted \
+ repetition starting at position \
+ {:?}.",
+ start)[])
}
};
self.chari = closer;
let greed = try!(self.get_next_greedy());
- let inner = self.chars.index(&((start+1)..closer)).iter().cloned()
+ let inner = self.chars[(start+1)..closer].iter().cloned()
.collect::<String>();
// Parse the min and max values from the regex.
let (mut min, mut max): (uint, Option<uint>);
if !inner.contains(",") {
- min = try!(self.parse_uint(inner.index(&FullRange)));
+ min = try!(self.parse_uint(&inner[]));
max = Some(min);
} else {
let pieces: Vec<&str> = inner.splitn(1, ',').collect();
// Do some bounds checking and make sure max >= min.
if min > MAX_REPEAT {
- return self.err(format!(
+ return self.err(&format!(
"{} exceeds maximum allowed repetitions ({})",
- min, MAX_REPEAT).index(&FullRange));
+ min, MAX_REPEAT)[]);
}
if max.is_some() {
let m = max.unwrap();
if m > MAX_REPEAT {
- return self.err(format!(
+ return self.err(&format!(
"{} exceeds maximum allowed repetitions ({})",
- m, MAX_REPEAT).index(&FullRange));
+ m, MAX_REPEAT)[]);
}
if m < min {
- return self.err(format!(
+ return self.err(&format!(
"Max repetitions ({}) cannot be smaller than min \
- repetitions ({}).", m, min).index(&FullRange));
+ repetitions ({}).", m, min)[]);
}
}
Ok(AstClass(ranges, flags))
}
_ => {
- self.err(format!("Invalid escape sequence '\\\\{}'", c).index(&FullRange))
+ self.err(&format!("Invalid escape sequence '\\\\{}'", c)[])
}
}
}
let closer =
match self.pos('}') {
Some(i) => i,
- None => return self.err(format!(
+ None => return self.err(&format!(
"Missing '}}' for unclosed '{{' at position {}",
- self.chari).index(&FullRange)),
+ self.chari)[]),
};
if closer - self.chari + 1 == 0 {
return self.err("No Unicode class name found.")
name = self.slice(self.chari + 1, self.chari + 2);
self.chari += 1;
}
- match find_class(UNICODE_CLASSES, name.index(&FullRange)) {
+ match find_class(UNICODE_CLASSES, &name[]) {
None => {
- return self.err(format!("Could not find Unicode class '{}'",
- name).index(&FullRange))
+ return self.err(&format!("Could not find Unicode class '{}'",
+ name)[])
}
Some(ranges) => {
Ok(AstClass(ranges, negated | (self.flags & FLAG_NOCASE)))
}
}
let s = self.slice(start, end);
- match num::from_str_radix::<u32>(s.index(&FullRange), 8) {
+ match num::from_str_radix::<u32>(&s[], 8) {
Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)),
None => {
- self.err(format!("Could not parse '{:?}' as octal number.",
- s).index(&FullRange))
+ self.err(&format!("Could not parse '{:?}' as octal number.",
+ s)[])
}
}
}
let closer =
match self.pos('}') {
None => {
- return self.err(format!("Missing '}}' for unclosed \
+ return self.err(&format!("Missing '}}' for unclosed \
'{{' at position {}",
- start).index(&FullRange))
+ start)[])
}
Some(i) => i,
};
self.chari = closer;
- self.parse_hex_digits(self.slice(start, closer).index(&FullRange))
+ self.parse_hex_digits(&self.slice(start, closer)[])
}
// Parses a two-digit hex number.
match num::from_str_radix::<u32>(s, 16) {
Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)),
None => {
- self.err(format!("Could not parse '{}' as hex number.", s).index(&FullRange))
+ self.err(&format!("Could not parse '{}' as hex number.", s)[])
}
}
}
"Capture names can only have underscores, letters and digits.")
}
if self.names.contains(&name) {
- return self.err(format!("Duplicate capture group name '{}'.",
- name).index(&FullRange))
+ return self.err(&format!("Duplicate capture group name '{}'.",
+ name)[])
}
self.names.push(name.clone());
self.chari = closer;
'U' => { flags = flags | FLAG_SWAP_GREED; saw_flag = true},
'-' => {
if sign < 0 {
- return self.err(format!(
+ return self.err(&format!(
"Cannot negate flags twice in '{}'.",
- self.slice(start, self.chari + 1)).index(&FullRange))
+ self.slice(start, self.chari + 1))[])
}
sign = -1;
saw_flag = false;
':' | ')' => {
if sign < 0 {
if !saw_flag {
- return self.err(format!(
+ return self.err(&format!(
"A valid flag does not follow negation in '{}'",
- self.slice(start, self.chari + 1)).index(&FullRange))
+ self.slice(start, self.chari + 1))[])
}
flags = flags ^ flags;
}
self.flags = flags;
return Ok(())
}
- _ => return self.err(format!(
- "Unrecognized flag '{}'.", self.cur()).index(&FullRange)),
+ _ => return self.err(&format!(
+ "Unrecognized flag '{}'.", self.cur())[]),
}
}
}
match s.parse::<uint>() {
Some(i) => Ok(i),
None => {
- self.err(format!("Expected an unsigned integer but got '{}'.",
- s).index(&FullRange))
+ self.err(&format!("Expected an unsigned integer but got '{}'.",
+ s)[])
}
}
}
match char::from_u32(n) {
Some(c) => Ok(c),
None => {
- self.err(format!("Could not decode '{}' to unicode \
- character.", n).index(&FullRange))
+ self.err(&format!("Could not decode '{}' to unicode \
+ character.", n)[])
}
}
}
}
fn slice(&self, start: uint, end: uint) -> String {
- self.chars.index(&(start..end)).iter().cloned().collect()
+ self.chars[start..end].iter().cloned().collect()
}
}
}
let (s, e) = cap.pos(0).unwrap(); // captures only reports matches
- new.push_str(text.index(&(last_match..s)));
- new.push_str(rep.reg_replace(&cap).index(&FullRange));
+ new.push_str(&text[last_match..s]);
+ new.push_str(&rep.reg_replace(&cap)[]);
last_match = e;
}
- new.push_str(text.index(&(last_match..text.len())));
+ new.push_str(&text[last_match..text.len()]);
return new;
}
/// Returns the original string of this regex.
pub fn as_str<'a>(&'a self) -> &'a str {
match *self {
- Dynamic(ExDynamic { ref original, .. }) => original.index(&FullRange),
- Native(ExNative { ref original, .. }) => original.index(&FullRange),
+ Dynamic(ExDynamic { ref original, .. }) => &original[],
+ Native(ExNative { ref original, .. }) => &original[],
}
}
if self.last >= text.len() {
None
} else {
- let s = text.index(&(self.last..text.len()));
+ let s = &text[self.last..text.len()];
self.last = text.len();
Some(s)
}
}
Some((s, e)) => {
- let matched = text.index(&(self.last..s));
+ let matched = &text[self.last..s];
self.last = e;
Some(matched)
}
} else {
self.cur += 1;
if self.cur >= self.limit {
- Some(text.index(&(self.splits.last..text.len())))
+ Some(&text[self.splits.last..text.len()])
} else {
self.splits.next()
}
})
});
let re = Regex::new(r"\$\$").unwrap();
- re.replace_all(text.index(&FullRange), NoExpand("$"))
+ re.replace_all(&text[], NoExpand("$"))
}
/// Returns the number of captured groups.
// out early.
if self.prog.prefix.len() > 0 && clist.size == 0 {
let needle = self.prog.prefix.as_bytes();
- let haystack = self.input.as_bytes().index(&(self.ic..));
+ let haystack = &self.input.as_bytes()[self.ic..];
match find_prefix(needle, haystack) {
None => break,
Some(i) => {
if n_uniq > 0 {
let s = ty_to_string(cx.tcx, ty);
let m = format!("type uses owned (Box type) pointers: {}", s);
- cx.span_lint(BOX_POINTERS, span, m.index(&FullRange));
+ cx.span_lint(BOX_POINTERS, span, &m[]);
}
}
}
}
fn check_item(&mut self, cx: &Context, item: &ast::Item) {
- if !attr::contains_name(item.attrs.index(&FullRange), "automatically_derived") {
+ if !attr::contains_name(&item.attrs[], "automatically_derived") {
return
}
let did = match item.node {
ty::ty_enum(did, _) => {
if ast_util::is_local(did) {
if let ast_map::NodeItem(it) = cx.tcx.map.get(did.node) {
- warned |= check_must_use(cx, it.attrs.index(&FullRange), s.span);
+ warned |= check_must_use(cx, &it.attrs[], s.span);
}
} else {
csearch::get_item_attrs(&cx.sess().cstore, did, |attrs| {
- warned |= check_must_use(cx, attrs.index(&FullRange), s.span);
+ warned |= check_must_use(cx, &attrs[], s.span);
});
}
}
msg.push_str(s.get());
}
}
- cx.span_lint(UNUSED_MUST_USE, sp, msg.index(&FullRange));
+ cx.span_lint(UNUSED_MUST_USE, sp, &msg[]);
return true;
}
}
} else {
format!("{} `{}` should have a camel case name such as `{}`", sort, s, c)
};
- cx.span_lint(NON_CAMEL_CASE_TYPES, span, m.index(&FullRange));
+ cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[]);
}
}
}
if !is_snake_case(ident) {
cx.span_lint(NON_SNAKE_CASE, span,
- format!("{} `{}` should have a snake case name such as `{}`",
- sort, s, to_snake_case(s.get())).index(&FullRange));
+ &format!("{} `{}` should have a snake case name such as `{}`",
+ sort, s, to_snake_case(s.get()))[]);
}
}
}
// upper/lowercase)
if s.get().chars().any(|c| c.is_lowercase()) {
cx.span_lint(NON_UPPER_CASE_GLOBALS, it.span,
- format!("static constant `{}` should have an uppercase name \
+ &format!("static constant `{}` should have an uppercase name \
such as `{}`",
- s.get(), s.get().chars().map(|c| c.to_uppercase())
- .collect::<String>().index(&FullRange)).index(&FullRange));
+ s.get(), &s.get().chars().map(|c| c.to_uppercase())
+ .collect::<String>()[])[]);
}
}
_ => {}
let s = token::get_ident(path1.node);
if s.get().chars().any(|c| c.is_lowercase()) {
cx.span_lint(NON_UPPER_CASE_GLOBALS, path1.span,
- format!("static constant in pattern `{}` should have an uppercase \
+ &format!("static constant in pattern `{}` should have an uppercase \
name such as `{}`",
- s.get(), s.get().chars().map(|c| c.to_uppercase())
- .collect::<String>().index(&FullRange)).index(&FullRange));
+ s.get(), &s.get().chars().map(|c| c.to_uppercase())
+ .collect::<String>()[])[]);
}
}
_ => {}
let necessary = struct_lit_needs_parens && contains_exterior_struct_lit(&**inner);
if !necessary {
cx.span_lint(UNUSED_PARENS, value.span,
- format!("unnecessary parentheses around {}",
- msg).index(&FullRange))
+ &format!("unnecessary parentheses around {}",
+ msg)[])
}
}
let m = format!("braces around {} is unnecessary",
token::get_ident(*name).get());
cx.span_lint(UNUSED_IMPORT_BRACES, view_item.span,
- m.index(&FullRange));
+ &m[]);
},
_ => ()
}
if let ast::PatIdent(_, ident, None) = fieldpat.node.pat.node {
if ident.node.as_str() == fieldpat.node.ident.as_str() {
cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span,
- format!("the `{}:` in this pattern is redundant and can \
- be removed", ident.node.as_str()).index(&FullRange))
+ &format!("the `{}:` in this pattern is redundant and can \
+ be removed", ident.node.as_str())[])
}
}
}
fn check_expr(&mut self, cx: &Context, e: &ast::Expr) {
if let ast::ExprMatch(_, ref arms, _) = e.node {
for a in arms.iter() {
- self.check_unused_mut_pat(cx, a.pats.index(&FullRange))
+ self.check_unused_mut_pat(cx, &a.pats[])
}
}
}
});
if !has_doc {
cx.span_lint(MISSING_DOCS, sp,
- format!("missing documentation for {}", desc).index(&FullRange));
+ &format!("missing documentation for {}", desc)[]);
}
}
}
let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| {
attr.check_name("doc") && match attr.meta_item_list() {
None => false,
- Some(l) => attr::contains_name(l.index(&FullRange), "hidden"),
+ Some(l) => attr::contains_name(&l[], "hidden"),
}
});
self.doc_hidden_stack.push(doc_hidden);
}
fn check_crate(&mut self, cx: &Context, krate: &ast::Crate) {
- self.check_missing_docs_attrs(cx, None, krate.attrs.index(&FullRange),
+ self.check_missing_docs_attrs(cx, None, &krate.attrs[],
krate.span, "crate");
}
ast::ItemTy(..) => "a type alias",
_ => return
};
- self.check_missing_docs_attrs(cx, Some(it.id), it.attrs.index(&FullRange),
+ self.check_missing_docs_attrs(cx, Some(it.id), &it.attrs[],
it.span, desc);
}
// Otherwise, doc according to privacy. This will also check
// doc for default methods defined on traits.
- self.check_missing_docs_attrs(cx, Some(m.id), m.attrs.index(&FullRange),
+ self.check_missing_docs_attrs(cx, Some(m.id), &m.attrs[],
m.span, "a method");
}
}
fn check_ty_method(&mut self, cx: &Context, tm: &ast::TypeMethod) {
- self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs.index(&FullRange),
+ self.check_missing_docs_attrs(cx, Some(tm.id), &tm.attrs[],
tm.span, "a type method");
}
let cur_struct_def = *self.struct_def_stack.last()
.expect("empty struct_def_stack");
self.check_missing_docs_attrs(cx, Some(cur_struct_def),
- sf.node.attrs.index(&FullRange), sf.span,
+ &sf.node.attrs[], sf.span,
"a struct field")
}
}
}
fn check_variant(&mut self, cx: &Context, v: &ast::Variant, _: &ast::Generics) {
- self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs.index(&FullRange),
+ self.check_missing_docs_attrs(cx, Some(v.node.id), &v.node.attrs[],
v.span, "a variant");
assert!(!self.in_variant);
self.in_variant = true;
_ => format!("use of {} item", label)
};
- cx.span_lint(lint, span, msg.index(&FullRange));
+ cx.span_lint(lint, span, &msg[]);
}
fn is_internal(&self, cx: &Context, span: Span) -> bool {
}
pub fn get_lints<'t>(&'t self) -> &'t [(&'static Lint, bool)] {
- self.lints.index(&FullRange)
+ &self.lints[]
}
pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec<LintId>, bool)> {
match (sess, from_plugin) {
// We load builtin lints first, so a duplicate is a compiler bug.
// Use early_error when handling -W help with no crate.
- (None, _) => early_error(msg.index(&FullRange)),
- (Some(sess), false) => sess.bug(msg.index(&FullRange)),
+ (None, _) => early_error(&msg[]),
+ (Some(sess), false) => sess.bug(&msg[]),
// A duplicate name from a plugin is a user error.
- (Some(sess), true) => sess.err(msg.index(&FullRange)),
+ (Some(sess), true) => sess.err(&msg[]),
}
}
match (sess, from_plugin) {
// We load builtin lints first, so a duplicate is a compiler bug.
// Use early_error when handling -W help with no crate.
- (None, _) => early_error(msg.index(&FullRange)),
- (Some(sess), false) => sess.bug(msg.index(&FullRange)),
+ (None, _) => early_error(&msg[]),
+ (Some(sess), false) => sess.bug(&msg[]),
// A duplicate name from a plugin is a user error.
- (Some(sess), true) => sess.err(msg.index(&FullRange)),
+ (Some(sess), true) => sess.err(&msg[]),
}
}
}
let warning = format!("lint {} has been renamed to {}",
lint_name, new_name);
match span {
- Some(span) => sess.span_warn(span, warning.index(&FullRange)),
- None => sess.warn(warning.index(&FullRange)),
+ Some(span) => sess.span_warn(span, &warning[]),
+ None => sess.warn(&warning[]),
};
Some(lint_id)
}
pub fn process_command_line(&mut self, sess: &Session) {
for &(ref lint_name, level) in sess.opts.lint_opts.iter() {
- match self.find_lint(lint_name.index(&FullRange), sess, None) {
+ match self.find_lint(&lint_name[], sess, None) {
Some(lint_id) => self.set_level(lint_id, (level, CommandLine)),
None => {
match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone()))
.collect::<FnvHashMap<&'static str,
Vec<LintId>>>()
- .get(lint_name.index(&FullRange)) {
+ .get(&lint_name[]) {
Some(v) => {
v.iter()
.map(|lint_id: &LintId|
self.set_level(*lint_id, (level, CommandLine)))
.collect::<Vec<()>>();
}
- None => sess.err(format!("unknown {} flag: {}",
- level.as_str(), lint_name).index(&FullRange)),
+ None => sess.err(&format!("unknown {} flag: {}",
+ level.as_str(), lint_name)[]),
}
}
}
if level == Forbid { level = Deny; }
match (level, span) {
- (Warn, Some(sp)) => sess.span_warn(sp, msg.index(&FullRange)),
- (Warn, None) => sess.warn(msg.index(&FullRange)),
- (Deny, Some(sp)) => sess.span_err(sp, msg.index(&FullRange)),
- (Deny, None) => sess.err(msg.index(&FullRange)),
+ (Warn, Some(sp)) => sess.span_warn(sp, &msg[]),
+ (Warn, None) => sess.warn(&msg[]),
+ (Deny, Some(sp)) => sess.span_err(sp, &msg[]),
+ (Deny, None) => sess.err(&msg[]),
_ => sess.bug("impossible level in raw_emit_lint"),
}
if now == Forbid && level != Forbid {
let lint_name = lint_id.as_str();
self.tcx.sess.span_err(span,
- format!("{}({}) overruled by outer forbid({})",
+ &format!("{}({}) overruled by outer forbid({})",
level.as_str(), lint_name,
- lint_name).index(&FullRange));
+ lint_name)[]);
} else if now != level {
let src = self.lints.get_level_source(lint_id).1;
self.level_stack.push((lint_id, (now, src)));
impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
fn visit_item(&mut self, it: &ast::Item) {
- self.with_lint_attrs(it.attrs.index(&FullRange), |cx| {
+ self.with_lint_attrs(&it.attrs[], |cx| {
run_lints!(cx, check_item, it);
cx.visit_ids(|v| v.visit_item(it));
visit::walk_item(cx, it);
}
fn visit_foreign_item(&mut self, it: &ast::ForeignItem) {
- self.with_lint_attrs(it.attrs.index(&FullRange), |cx| {
+ self.with_lint_attrs(&it.attrs[], |cx| {
run_lints!(cx, check_foreign_item, it);
visit::walk_foreign_item(cx, it);
})
}
fn visit_view_item(&mut self, i: &ast::ViewItem) {
- self.with_lint_attrs(i.attrs.index(&FullRange), |cx| {
+ self.with_lint_attrs(&i.attrs[], |cx| {
run_lints!(cx, check_view_item, i);
cx.visit_ids(|v| v.visit_view_item(i));
visit::walk_view_item(cx, i);
body: &'v ast::Block, span: Span, id: ast::NodeId) {
match fk {
visit::FkMethod(_, _, m) => {
- self.with_lint_attrs(m.attrs.index(&FullRange), |cx| {
+ self.with_lint_attrs(&m.attrs[], |cx| {
run_lints!(cx, check_fn, fk, decl, body, span, id);
cx.visit_ids(|v| {
v.visit_fn(fk, decl, body, span, id);
}
fn visit_ty_method(&mut self, t: &ast::TypeMethod) {
- self.with_lint_attrs(t.attrs.index(&FullRange), |cx| {
+ self.with_lint_attrs(&t.attrs[], |cx| {
run_lints!(cx, check_ty_method, t);
visit::walk_ty_method(cx, t);
})
}
fn visit_struct_field(&mut self, s: &ast::StructField) {
- self.with_lint_attrs(s.node.attrs.index(&FullRange), |cx| {
+ self.with_lint_attrs(&s.node.attrs[], |cx| {
run_lints!(cx, check_struct_field, s);
visit::walk_struct_field(cx, s);
})
}
fn visit_variant(&mut self, v: &ast::Variant, g: &ast::Generics) {
- self.with_lint_attrs(v.node.attrs.index(&FullRange), |cx| {
+ self.with_lint_attrs(&v.node.attrs[], |cx| {
run_lints!(cx, check_variant, v, g);
visit::walk_variant(cx, v, g);
run_lints!(cx, check_variant_post, v, g);
None => {}
Some(lints) => {
for (lint_id, span, msg) in lints.into_iter() {
- self.span_lint(lint_id.lint, span, msg.index(&FullRange))
+ self.span_lint(lint_id.lint, span, &msg[])
}
}
}
let mut cx = Context::new(tcx, krate, exported_items);
// Visit the whole crate.
- cx.with_lint_attrs(krate.attrs.index(&FullRange), |cx| {
+ cx.with_lint_attrs(&krate.attrs[], |cx| {
cx.visit_id(ast::CRATE_NODE_ID);
cx.visit_ids(|v| {
v.visited_outermost = true;
}
fn should_link(i: &ast::ViewItem) -> bool {
- !attr::contains_name(i.attrs.index(&FullRange), "no_link")
+ !attr::contains_name(&i.attrs[], "no_link")
}
for c in s.chars() {
if c.is_alphanumeric() { continue }
if c == '_' || c == '-' { continue }
- err(format!("invalid character `{}` in crate name: `{}`", c, s).index(&FullRange));
+ err(&format!("invalid character `{}` in crate name: `{}`", c, s)[]);
}
match sess {
Some(sess) => sess.abort_if_errors(),
match self.extract_crate_info(i) {
Some(info) => {
let (cnum, _, _) = self.resolve_crate(&None,
- info.ident.index(&FullRange),
- info.name.index(&FullRange),
+ &info.ident[],
+ &info.name[],
None,
i.span,
PathKind::Crate);
let name = match *path_opt {
Some((ref path_str, _)) => {
let name = path_str.get().to_string();
- validate_crate_name(Some(self.sess), name.index(&FullRange),
+ validate_crate_name(Some(self.sess), &name[],
Some(i.span));
name
}
cstore::NativeUnknown
} else {
self.sess.span_err(m.span,
- format!("unknown kind: `{}`",
- k).index(&FullRange));
+ &format!("unknown kind: `{}`",
+ k)[]);
cstore::NativeUnknown
}
}
match self.sess.opts.externs.get(name) {
Some(locs) => {
let found = locs.iter().any(|l| {
- let l = fs::realpath(&Path::new(l.index(&FullRange))).ok();
+ let l = fs::realpath(&Path::new(&l[])).ok();
l == source.dylib || l == source.rlib
});
if found {
crate_name: name,
hash: hash.map(|a| &*a),
filesearch: self.sess.target_filesearch(kind),
- triple: self.sess.opts.target_triple.index(&FullRange),
+ triple: &self.sess.opts.target_triple[],
root: root,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
decoder::get_crate_deps(cdata).iter().map(|dep| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, _, _) = self.resolve_crate(root,
- dep.name.index(&FullRange),
- dep.name.index(&FullRange),
+ &dep.name[],
+ &dep.name[],
Some(&dep.hash),
span,
PathKind::Dependency);
pub fn read_plugin_metadata<'b>(&'b mut self,
vi: &'b ast::ViewItem) -> PluginMetadata<'b> {
let info = self.extract_crate_info(vi).unwrap();
- let target_triple = self.sess.opts.target_triple.index(&FullRange);
+ let target_triple = &self.sess.opts.target_triple[];
let is_cross = target_triple != config::host_triple();
let mut should_link = info.should_link && !is_cross;
let mut target_only = false;
let mut load_ctxt = loader::Context {
sess: self.sess,
span: vi.span,
- ident: ident.index(&FullRange),
- crate_name: name.index(&FullRange),
+ ident: &ident[],
+ crate_name: &name[],
hash: None,
filesearch: self.sess.host_filesearch(PathKind::Crate),
triple: config::host_triple(),
let register = should_link && self.existing_match(info.name.as_slice(), None).is_none();
let metadata = if register {
// Register crate now to avoid double-reading metadata
- let (_, cmd, _) = self.register_crate(&None, info.ident.index(&FullRange),
- info.name.index(&FullRange), vi.span, library);
+ let (_, cmd, _) = self.register_crate(&None, &info.ident[],
+ &info.name[], vi.span, library);
PMDSource::Registered(cmd)
} else {
// Not registering the crate; just hold on to the metadata
impl<'a> PluginMetadata<'a> {
/// Read exported macros
pub fn exported_macros(&self) -> Vec<ast::MacroDef> {
- let imported_from = Some(token::intern(self.info.ident.index(&FullRange)).ident());
- let source_name = format!("<{} macros>", self.info.ident.index(&FullRange));
+ let imported_from = Some(token::intern(&self.info.ident[]).ident());
+ let source_name = format!("<{} macros>", &self.info.ident[]);
let mut macros = vec![];
decoder::each_exported_macro(self.metadata.as_slice(),
&*self.sess.cstore.intr,
self.info.ident,
config::host_triple(),
self.sess.opts.target_triple);
- self.sess.span_err(self.vi_span, message.index(&FullRange));
+ self.sess.span_err(self.vi_span, &message[]);
self.sess.abort_if_errors();
}
let message = format!("plugin crate `{}` only found in rlib format, \
but must be available in dylib format",
self.info.ident);
- self.sess.span_err(self.vi_span, message.index(&FullRange));
+ self.sess.span_err(self.vi_span, &message[]);
// No need to abort because the loading code will just ignore this
// empty dylib.
None
// FIXME #1920: This path is not always correct if the crate is not linked
// into the root namespace.
- let mut r = vec![ast_map::PathMod(token::intern(cdata.name.index(&FullRange)))];
+ let mut r = vec![ast_map::PathMod(token::intern(&cdata.name[]))];
r.push_all(path.as_slice());
r
}
let mut ret = None;
reader::tagged_docs(tagged_doc.doc, belt, |elt| {
let pos = u64_from_be_bytes(elt.data, elt.start, 4) as uint;
- if eq_fn(elt.data.index(&((elt.start + 4) .. elt.end))) {
+ if eq_fn(&elt.data[(elt.start + 4) .. elt.end]) {
ret = Some(reader::doc_at(d.data, pos).unwrap().doc);
false
} else {
items: rbml::Doc<'a>) -> Option<rbml::Doc<'a>> {
fn eq_item(bytes: &[u8], item_id: ast::NodeId) -> bool {
return u64_from_be_bytes(
- bytes.index(&(0u..4u)), 0u, 4u) as ast::NodeId
+ &bytes[0u..4u], 0u, 4u) as ast::NodeId
== item_id;
}
lookup_hash(items,
}
reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
let name = docstr(depdoc, tag_crate_dep_crate_name);
- let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).index(&FullRange));
+ let hash = Svh::new(&docstr(depdoc, tag_crate_dep_hash)[]);
deps.push(CrateDep {
cnum: crate_num,
name: name,
}
pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) {
- rbml_w.wr_tagged_str(tag_def_id, def_to_string(id).index(&FullRange));
+ rbml_w.wr_tagged_str(tag_def_id, &def_to_string(id)[]);
}
#[derive(Clone)]
rbml_w.end_tag();
rbml_w.start_tag(tag_mod_child);
- rbml_w.wr_str(s.index(&FullRange));
+ rbml_w.wr_str(&s[]);
rbml_w.end_tag();
}
}
None => {
ecx.diag.handler().bug(
- format!("encode_symbol: id not found {}", id).index(&FullRange));
+ &format!("encode_symbol: id not found {}", id)[]);
}
}
rbml_w.end_tag();
encode_name(rbml_w, variant.node.name.name);
encode_parent_item(rbml_w, local_def(id));
encode_visibility(rbml_w, variant.node.vis);
- encode_attributes(rbml_w, variant.node.attrs.index(&FullRange));
- encode_repr_attrs(rbml_w, ecx, variant.node.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &variant.node.attrs[]);
+ encode_repr_attrs(rbml_w, ecx, &variant.node.attrs[]);
let stab = stability::lookup(ecx.tcx, ast_util::local_def(variant.node.id));
encode_stability(rbml_w, stab);
let fields = ty::lookup_struct_fields(ecx.tcx, def_id);
let idx = encode_info_for_struct(ecx,
rbml_w,
- fields.index(&FullRange),
+ &fields[],
index);
- encode_struct_fields(rbml_w, fields.index(&FullRange), def_id);
+ encode_struct_fields(rbml_w, &fields[], def_id);
encode_index(rbml_w, idx, write_i64);
}
}
exp.name, token::get_name(method_name));
rbml_w.start_tag(tag_items_data_item_reexport);
rbml_w.start_tag(tag_items_data_item_reexport_def_id);
- rbml_w.wr_str(def_to_string(method_def_id).index(&FullRange));
+ rbml_w.wr_str(&def_to_string(method_def_id)[]);
rbml_w.end_tag();
rbml_w.start_tag(tag_items_data_item_reexport_name);
- rbml_w.wr_str(format!("{}::{}",
+ rbml_w.wr_str(&format!("{}::{}",
exp.name,
- token::get_name(method_name)).index(&FullRange));
+ token::get_name(method_name))[]);
rbml_w.end_tag();
rbml_w.end_tag();
}
id);
rbml_w.start_tag(tag_items_data_item_reexport);
rbml_w.start_tag(tag_items_data_item_reexport_def_id);
- rbml_w.wr_str(def_to_string(exp.def_id).index(&FullRange));
+ rbml_w.wr_str(&def_to_string(exp.def_id)[]);
rbml_w.end_tag();
rbml_w.start_tag(tag_items_data_item_reexport_name);
rbml_w.wr_str(exp.name.as_str());
// Encode info about all the module children.
for item in md.items.iter() {
rbml_w.start_tag(tag_mod_child);
- rbml_w.wr_str(def_to_string(local_def(item.id)).index(&FullRange));
+ rbml_w.wr_str(&def_to_string(local_def(item.id))[]);
rbml_w.end_tag();
each_auxiliary_node_id(&**item, |auxiliary_node_id| {
rbml_w.start_tag(tag_mod_child);
- rbml_w.wr_str(def_to_string(local_def(
- auxiliary_node_id)).index(&FullRange));
+ rbml_w.wr_str(&def_to_string(local_def(
+ auxiliary_node_id))[]);
rbml_w.end_tag();
true
});
did, ecx.tcx.map.node_to_string(did));
rbml_w.start_tag(tag_mod_impl);
- rbml_w.wr_str(def_to_string(local_def(did)).index(&FullRange));
+ rbml_w.wr_str(&def_to_string(local_def(did))[]);
rbml_w.end_tag();
}
}
ast::Public => 'y',
ast::Inherited => 'i',
};
- rbml_w.wr_str(ch.to_string().index(&FullRange));
+ rbml_w.wr_str(&ch.to_string()[]);
rbml_w.end_tag();
}
ty::FnMutUnboxedClosureKind => 'm',
ty::FnOnceUnboxedClosureKind => 'o',
};
- rbml_w.wr_str(ch.to_string().index(&FullRange));
+ rbml_w.wr_str(&ch.to_string()[]);
rbml_w.end_tag();
}
rbml_w.end_tag();
rbml_w.wr_tagged_str(tag_region_param_def_def_id,
- def_to_string(param.def_id).index(&FullRange));
+ &def_to_string(param.def_id)[]);
rbml_w.wr_tagged_u64(tag_region_param_def_space,
param.space.to_uint() as u64);
encode_path(rbml_w, impl_path.chain(Some(elem).into_iter()));
match ast_item_opt {
Some(&ast::MethodImplItem(ref ast_method)) => {
- encode_attributes(rbml_w, ast_method.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &ast_method.attrs[]);
let any_types = !pty.generics.types.is_empty();
- if any_types || is_default_impl || should_inline(ast_method.attrs.index(&FullRange)) {
+ if any_types || is_default_impl || should_inline(&ast_method.attrs[]) {
encode_inlined_item(ecx, rbml_w, IIImplItemRef(local_def(parent_id),
ast_item_opt.unwrap()));
}
match typedef_opt {
None => {}
Some(typedef) => {
- encode_attributes(rbml_w, typedef.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &typedef.attrs[]);
encode_type(ecx, rbml_w, ty::node_id_to_type(ecx.tcx,
typedef.id));
}
encode_path(rbml_w, path);
encode_visibility(rbml_w, vis);
encode_stability(rbml_w, stab);
- encode_attributes(rbml_w, item.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &item.attrs[]);
rbml_w.end_tag();
}
ast::ItemConst(_, _) => {
encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id));
encode_name(rbml_w, item.ident.name);
encode_path(rbml_w, path);
- encode_attributes(rbml_w, item.attrs.index(&FullRange));
- if tps_len > 0u || should_inline(item.attrs.index(&FullRange)) {
+ encode_attributes(rbml_w, &item.attrs[]);
+ if tps_len > 0u || should_inline(&item.attrs[]) {
encode_inlined_item(ecx, rbml_w, IIItemRef(item));
}
if tps_len == 0 {
encode_info_for_mod(ecx,
rbml_w,
m,
- item.attrs.index(&FullRange),
+ &item.attrs[],
item.id,
path,
item.ident,
// Encode all the items in this module.
for foreign_item in fm.items.iter() {
rbml_w.start_tag(tag_mod_child);
- rbml_w.wr_str(def_to_string(local_def(foreign_item.id)).index(&FullRange));
+ rbml_w.wr_str(&def_to_string(local_def(foreign_item.id))[]);
rbml_w.end_tag();
}
encode_visibility(rbml_w, vis);
encode_item_variances(rbml_w, ecx, item.id);
encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id));
encode_name(rbml_w, item.ident.name);
- encode_attributes(rbml_w, item.attrs.index(&FullRange));
- encode_repr_attrs(rbml_w, ecx, item.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &item.attrs[]);
+ encode_repr_attrs(rbml_w, ecx, &item.attrs[]);
for v in (*enum_definition).variants.iter() {
encode_variant_id(rbml_w, local_def(v.node.id));
}
encode_enum_variant_info(ecx,
rbml_w,
item.id,
- (*enum_definition).variants.index(&FullRange),
+ &(*enum_definition).variants[],
index);
}
ast::ItemStruct(ref struct_def, _) => {
class itself */
let idx = encode_info_for_struct(ecx,
rbml_w,
- fields.index(&FullRange),
+ &fields[],
index);
/* Index the class*/
encode_item_variances(rbml_w, ecx, item.id);
encode_name(rbml_w, item.ident.name);
- encode_attributes(rbml_w, item.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &item.attrs[]);
encode_path(rbml_w, path.clone());
encode_stability(rbml_w, stab);
encode_visibility(rbml_w, vis);
- encode_repr_attrs(rbml_w, ecx, item.attrs.index(&FullRange));
+ encode_repr_attrs(rbml_w, ecx, &item.attrs[]);
/* Encode def_ids for each field and method
for methods, write all the stuff get_trait_method
needs to know*/
- encode_struct_fields(rbml_w, fields.index(&FullRange), def_id);
+ encode_struct_fields(rbml_w, &fields[], def_id);
encode_inlined_item(ecx, rbml_w, IIItemRef(item));
encode_family(rbml_w, 'i');
encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id));
encode_name(rbml_w, item.ident.name);
- encode_attributes(rbml_w, item.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &item.attrs[]);
encode_unsafety(rbml_w, unsafety);
encode_polarity(rbml_w, polarity);
match ty.node {
encode_generics(rbml_w, ecx, &trait_def.generics, tag_item_generics);
encode_trait_ref(rbml_w, ecx, &*trait_def.trait_ref, tag_item_trait_ref);
encode_name(rbml_w, item.ident.name);
- encode_attributes(rbml_w, item.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &item.attrs[]);
encode_visibility(rbml_w, vis);
encode_stability(rbml_w, stab);
for &method_def_id in ty::trait_item_def_ids(tcx, def_id).iter() {
rbml_w.end_tag();
rbml_w.start_tag(tag_mod_child);
- rbml_w.wr_str(def_to_string(method_def_id.def_id()).index(&FullRange));
+ rbml_w.wr_str(&def_to_string(method_def_id.def_id())[]);
rbml_w.end_tag();
}
encode_path(rbml_w, path.clone());
};
match trait_item {
&ast::RequiredMethod(ref m) => {
- encode_attributes(rbml_w, m.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &m.attrs[]);
encode_trait_item(rbml_w);
encode_item_sort(rbml_w, 'r');
encode_method_argument_names(rbml_w, &*m.decl);
}
&ast::ProvidedMethod(ref m) => {
- encode_attributes(rbml_w, m.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &m.attrs[]);
encode_trait_item(rbml_w);
encode_item_sort(rbml_w, 'p');
encode_inlined_item(ecx, rbml_w, IITraitItemRef(def_id, trait_item));
&ast::TypeTraitItem(ref associated_type) => {
encode_attributes(rbml_w,
- associated_type.attrs.index(&FullRange));
+ &associated_type.attrs[]);
encode_item_sort(rbml_w, 't');
}
}
rbml_w.start_tag(tag_macro_def);
encode_name(rbml_w, def.ident.name);
- encode_attributes(rbml_w, def.attrs.index(&FullRange));
+ encode_attributes(rbml_w, &def.attrs[]);
rbml_w.start_tag(tag_macro_def_body);
- rbml_w.wr_str(pprust::tts_to_string(def.body.index(&FullRange)).index(&FullRange));
+ rbml_w.wr_str(&pprust::tts_to_string(&def.body[])[]);
rbml_w.end_tag();
rbml_w.end_tag();
fn visit_struct_field(&mut self, field: &ast::StructField) {
self.rbml_w.start_tag(tag_struct_field);
self.rbml_w.wr_tagged_u32(tag_struct_field_id, field.node.id);
- encode_attributes(self.rbml_w, field.node.attrs.index(&FullRange));
+ encode_attributes(self.rbml_w, &field.node.attrs[]);
self.rbml_w.end_tag();
}
}
rbml_w.start_tag(tag_misc_info_crate_items);
for item in krate.module.items.iter() {
rbml_w.start_tag(tag_mod_child);
- rbml_w.wr_str(def_to_string(local_def(item.id)).index(&FullRange));
+ rbml_w.wr_str(&def_to_string(local_def(item.id))[]);
rbml_w.end_tag();
each_auxiliary_node_id(&**item, |auxiliary_node_id| {
rbml_w.start_tag(tag_mod_child);
- rbml_w.wr_str(def_to_string(local_def(
- auxiliary_node_id)).index(&FullRange));
+ rbml_w.wr_str(&def_to_string(local_def(
+ auxiliary_node_id))[]);
rbml_w.end_tag();
true
});
let mut rbml_w = writer::Encoder::new(wr);
- encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name.index(&FullRange));
+ encode_crate_name(&mut rbml_w, &ecx.link_meta.crate_name[]);
encode_crate_triple(&mut rbml_w,
- tcx.sess
+ &tcx.sess
.opts
.target_triple
- .index(&FullRange));
+ []);
encode_hash(&mut rbml_w, &ecx.link_meta.crate_hash);
encode_dylib_dependency_formats(&mut rbml_w, &ecx);
let mut i = rbml_w.writer.tell().unwrap();
- encode_attributes(&mut rbml_w, krate.attrs.index(&FullRange));
+ encode_attributes(&mut rbml_w, &krate.attrs[]);
stats.attr_bytes = rbml_w.writer.tell().unwrap() - i;
i = rbml_w.writer.tell().unwrap();
&Some(ref r) => format!("{} which `{}` depends on",
message, r.ident)
};
- self.sess.span_err(self.span, message.index(&FullRange));
+ self.sess.span_err(self.span, &message[]);
if self.rejected_via_triple.len() > 0 {
let mismatches = self.rejected_via_triple.iter();
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
- format!("crate `{}`, path #{}, triple {}: {}",
- self.ident, i+1, got, path.display()).index(&FullRange));
+ &format!("crate `{}`, path #{}, triple {}: {}",
+ self.ident, i+1, got, path.display())[]);
}
}
if self.rejected_via_hash.len() > 0 {
let mismatches = self.rejected_via_hash.iter();
for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
- format!("crate `{}` path {}{}: {}",
- self.ident, "#", i+1, path.display()).index(&FullRange));
+ &format!("crate `{}` path {}{}: {}",
+ self.ident, "#", i+1, path.display())[]);
}
match self.root {
&None => {}
&Some(ref r) => {
for (i, path) in r.paths().iter().enumerate() {
self.sess.fileline_note(self.span,
- format!("crate `{}` path #{}: {}",
- r.ident, i+1, path.display()).index(&FullRange));
+ &format!("crate `{}` path #{}: {}",
+ r.ident, i+1, path.display())[]);
}
}
}
None => return FileDoesntMatch,
Some(file) => file,
};
- let (hash, rlib) = if file.starts_with(rlib_prefix.index(&FullRange)) &&
+ let (hash, rlib) = if file.starts_with(&rlib_prefix[]) &&
file.ends_with(".rlib") {
(file.slice(rlib_prefix.len(), file.len() - ".rlib".len()),
true)
1 => Some(libraries.into_iter().next().unwrap()),
_ => {
self.sess.span_err(self.span,
- format!("multiple matching crates for `{}`",
- self.crate_name).index(&FullRange));
+ &format!("multiple matching crates for `{}`",
+ self.crate_name)[]);
self.sess.note("candidates:");
for lib in libraries.iter() {
match lib.dylib {
Some(ref p) => {
- self.sess.note(format!("path: {}",
- p.display()).index(&FullRange));
+ self.sess.note(&format!("path: {}",
+ p.display())[]);
}
None => {}
}
match lib.rlib {
Some(ref p) => {
- self.sess.note(format!("path: {}",
- p.display()).index(&FullRange));
+ self.sess.note(&format!("path: {}",
+ p.display())[]);
}
None => {}
}
let data = lib.metadata.as_slice();
let name = decoder::get_crate_name(data);
- note_crate_name(self.sess.diagnostic(), name.index(&FullRange));
+ note_crate_name(self.sess.diagnostic(), &name[]);
}
None
}
};
if ret.is_some() {
self.sess.span_err(self.span,
- format!("multiple {} candidates for `{}` \
+ &format!("multiple {} candidates for `{}` \
found",
flavor,
- self.crate_name).index(&FullRange));
+ self.crate_name)[]);
self.sess.span_note(self.span,
- format!(r"candidate #1: {}",
+ &format!(r"candidate #1: {}",
ret.as_ref().unwrap()
- .display()).index(&FullRange));
+ .display())[]);
error = 1;
ret = None;
}
if error > 0 {
error += 1;
self.sess.span_note(self.span,
- format!(r"candidate #{}: {}", error,
- lib.display()).index(&FullRange));
+ &format!(r"candidate #{}: {}", error,
+ lib.display())[]);
continue
}
*slot = Some(metadata);
let mut rlibs = HashSet::new();
let mut dylibs = HashSet::new();
{
- let mut locs = locs.iter().map(|l| Path::new(l.index(&FullRange))).filter(|loc| {
+ let mut locs = locs.iter().map(|l| Path::new(&l[])).filter(|loc| {
if !loc.exists() {
- sess.err(format!("extern location for {} does not exist: {}",
- self.crate_name, loc.display()).index(&FullRange));
+ sess.err(&format!("extern location for {} does not exist: {}",
+ self.crate_name, loc.display())[]);
return false;
}
let file = match loc.filename_str() {
Some(file) => file,
None => {
- sess.err(format!("extern location for {} is not a file: {}",
- self.crate_name, loc.display()).index(&FullRange));
+ sess.err(&format!("extern location for {} is not a file: {}",
+ self.crate_name, loc.display())[]);
return false;
}
};
return true
} else {
let (ref prefix, ref suffix) = dylibname;
- if file.starts_with(prefix.index(&FullRange)) &&
- file.ends_with(suffix.index(&FullRange)) {
+ if file.starts_with(&prefix[]) &&
+ file.ends_with(&suffix[]) {
return true
}
}
- sess.err(format!("extern location for {} is of an unknown type: {}",
- self.crate_name, loc.display()).index(&FullRange));
+ sess.err(&format!("extern location for {} is of an unknown type: {}",
+ self.crate_name, loc.display())[]);
false
});
}
pub fn note_crate_name(diag: &SpanHandler, name: &str) {
- diag.handler().note(format!("crate name: {}", name).index(&FullRange));
+ diag.handler().note(&format!("crate name: {}", name)[]);
}
impl ArchiveMetadata {
}
let end_pos = st.pos;
st.pos += 1;
- return op(st.data.index(&(start_pos..end_pos)));
+ return op(&st.data[start_pos..end_pos]);
}
pub fn parse_ident(st: &mut PState, last: char) -> ast::Ident {
'~' => ty::UniqTraitStore,
'&' => ty::RegionTraitStore(parse_region_(st, conv), parse_mutability(st)),
c => {
- st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'",
- c).index(&FullRange))
+ st.tcx.sess.bug(&format!("parse_trait_store(): bad input '{}'",
+ c)[])
}
}
}
}
'[' => {
let def = parse_def_(st, RegionParameter, conv);
- let ident = token::str_to_ident(parse_str(st, ']').index(&FullRange));
+ let ident = token::str_to_ident(&parse_str(st, ']')[]);
ty::BrNamed(def, ident.name)
}
'f' => {
assert_eq!(next(st), '|');
let index = parse_u32(st);
assert_eq!(next(st), '|');
- let nm = token::str_to_ident(parse_str(st, ']').index(&FullRange));
+ let nm = token::str_to_ident(&parse_str(st, ']')[]);
ty::ReEarlyBound(node_id, space, index, nm.name)
}
'f' => {
assert_eq!(next(st), '|');
let space = parse_param_space(st);
assert_eq!(next(st), '|');
- let name = token::intern(parse_str(st, ']').index(&FullRange));
+ let name = token::intern(&parse_str(st, ']')[]);
return ty::mk_param(tcx, space, index, name);
}
'~' => return ty::mk_uniq(tcx, parse_ty_(st, conv)),
assert_eq!(next(st), '[');
scan(st, |c| c == ']', |bytes| {
let abi_str = str::from_utf8(bytes).unwrap();
- abi::lookup(abi_str.index(&FullRange)).expect(abi_str)
+ abi::lookup(&abi_str[]).expect(abi_str)
})
}
panic!();
}
- let crate_part = buf.index(&(0u..colon_idx));
- let def_part = buf.index(&((colon_idx + 1u)..len));
+ let crate_part = &buf[0u..colon_idx];
+ let def_part = &buf[(colon_idx + 1u)..len];
let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| s.parse::<uint>()) {
Some(cn) => cn as ast::CrateNum,
let a_def = match tcx.def_map.borrow().get(&id) {
None => {
tcx.sess.span_bug(ast_ty.span,
- format!("unbound path {}",
- path.repr(tcx)).index(&FullRange))
+ &format!("unbound path {}",
+ path.repr(tcx))[])
}
Some(&d) => d
};
// Do an Option dance to use the path after it is moved below.
let s = ast_map::path_to_string(ast_map::Values(path.iter()));
path_as_str = Some(s);
- path_as_str.as_ref().map(|x| x.index(&FullRange))
+ path_as_str.as_ref().map(|x| &x[])
});
let mut ast_dsr = reader::Decoder::new(ast_doc);
let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
match c::astencode_tag::from_uint(tag) {
None => {
dcx.tcx.sess.bug(
- format!("unknown tag found in side tables: {:x}",
- tag).index(&FullRange));
+ &format!("unknown tag found in side tables: {:x}",
+ tag)[]);
}
Some(value) => {
let val_doc = entry_doc.get(c::tag_table_val as uint);
}
_ => {
dcx.tcx.sess.bug(
- format!("unknown tag found in side tables: {:x}",
- tag).index(&FullRange));
+ &format!("unknown tag found in side tables: {:x}",
+ tag)[]);
}
}
}
let mut cond_exit = discr_exit;
for arm in arms.iter() {
cond_exit = self.add_dummy_node(&[cond_exit]); // 2
- let pats_exit = self.pats_any(arm.pats.index(&FullRange),
+ let pats_exit = self.pats_any(&arm.pats[],
cond_exit); // 3
let guard_exit = self.opt_expr(&arm.guard,
pats_exit); // 4
}
self.tcx.sess.span_bug(
expr.span,
- format!("no loop scope for id {}",
- loop_id).index(&FullRange));
+ &format!("no loop scope for id {}",
+ loop_id)[]);
}
r => {
self.tcx.sess.span_bug(
expr.span,
- format!("bad entry `{:?}` in def_map for label",
- r).index(&FullRange));
+ &format!("bad entry `{:?}` in def_map for label",
+ r)[]);
}
}
}
}
impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> {
- fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name.index(&FullRange)).unwrap() }
+ fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(&self.name[]).unwrap() }
fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> {
dot::Id::new(format!("N{}", i.node_id())).unwrap()
let s = self.ast_map.node_to_string(node_id);
// left-aligns the lines
let s = replace_newline_with_backslash_l(s);
- label.push_str(format!("exiting scope_{} {}",
+ label.push_str(&format!("exiting scope_{} {}",
i,
- s.index(&FullRange)).index(&FullRange));
+ &s[])[]);
}
dot::LabelText::EscStr(label.into_cow())
}
Loop => {}
Closure => {
self.sess.span_err(span,
- format!("`{}` inside of a closure", name).index(&FullRange));
+ &format!("`{}` inside of a closure", name)[]);
}
Normal => {
self.sess.span_err(span,
- format!("`{}` outside of loop", name).index(&FullRange));
+ &format!("`{}` outside of loop", name)[]);
}
}
}
/// Pretty-printer for matrices of patterns, example:
/// ++++++++++++++++++++++++++
-/// + _ + .index(&FullRange) +
+/// + _ + [] +
/// ++++++++++++++++++++++++++
/// + true + [First] +
/// ++++++++++++++++++++++++++
// First, check legality of move bindings.
check_legality_of_move_bindings(cx,
arm.guard.is_some(),
- arm.pats.index(&FullRange));
+ &arm.pats[]);
// Second, if there is a guard on each arm, make sure it isn't
// assigning or borrowing anything mutably.
}
// Fourth, check for unreachable arms.
- check_arms(cx, inlined_arms.index(&FullRange), source);
+ check_arms(cx, &inlined_arms[], source);
// Finally, check if the whole match expression is exhaustive.
// Check for empty enum, because is_useful only works on inhabited types.
is_refutable(cx, &*static_inliner.fold_pat((*pat).clone()), |uncovered_pat| {
cx.tcx.sess.span_err(
pat.span,
- format!("refutable pattern in `for` loop binding: \
+ &format!("refutable pattern in `for` loop binding: \
`{}` not covered",
- pat_to_string(uncovered_pat)).index(&FullRange));
+ pat_to_string(uncovered_pat))[]);
});
// Check legality of move bindings.
for pat in pats.iter() {
let v = vec![&**pat];
- match is_useful(cx, &seen, v.index(&FullRange), LeaveOutWitness) {
+ match is_useful(cx, &seen, &v[], LeaveOutWitness) {
NotUseful => {
match source {
ast::MatchSource::IfLetDesugar { .. } => {
fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix) {
match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) {
UsefulWithWitness(pats) => {
- let witness = match pats.index(&FullRange) {
+ let witness = match &pats[] {
[ref witness] => &**witness,
[] => DUMMY_WILD_PAT,
_ => unreachable!()
UsefulWithWitness(pats) => UsefulWithWitness({
let arity = constructor_arity(cx, &c, left_ty);
let mut result = {
- let pat_slice = pats.index(&FullRange);
+ let pat_slice = &pats[];
let subpats: Vec<_> = range(0, arity).map(|i| {
pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p)
}).collect();
witness: WitnessPreference) -> Usefulness {
let arity = constructor_arity(cx, &ctor, lty);
let matrix = Matrix(m.iter().filter_map(|r| {
- specialize(cx, r.index(&FullRange), &ctor, 0u, arity)
+ specialize(cx, &r[], &ctor, 0u, arity)
}).collect());
match specialize(cx, v, &ctor, 0u, arity) {
- Some(v) => is_useful(cx, &matrix, v.index(&FullRange), witness),
+ Some(v) => is_useful(cx, &matrix, &v[], witness),
None => NotUseful
}
}
/// This computes the arity of a constructor. The arity of a constructor
/// is how many subpattern patterns of that constructor should be expanded to.
///
-/// For instance, a tuple pattern (_, 42u, Some(.index(&FullRange))) has the arity of 3.
+/// For instance, a tuple pattern (_, 42u, Some([])) has the arity of 3.
/// A struct pattern's arity is the number of fields it contains, etc.
pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> uint {
match ty.sty {
}
};
head.map(|mut head| {
- head.push_all(r.index(&(0..col)));
- head.push_all(r.index(&((col + 1)..)));
+ head.push_all(&r[0..col]);
+ head.push_all(&r[(col + 1)..]);
head
})
}
_ => {
cx.tcx.sess.span_bug(
p.span,
- format!("binding pattern {} is not an \
+ &format!("binding pattern {} is not an \
identifier: {:?}",
p.id,
- p.node).index(&FullRange));
+ p.node)[]);
}
}
}
return
};
- self.tcx.sess.span_err(e.span, format!("mutable statics are not allowed \
- to have {}", suffix).index(&FullRange));
+ self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \
+ to have {}", suffix)[]);
}
fn check_static_type(&self, e: &ast::Expr) {
ty::ty_struct(did, _) |
ty::ty_enum(did, _) if ty::has_dtor(self.tcx, did) => {
self.tcx.sess.span_err(e.span,
- format!("{} are not allowed to have \
- destructors", self.msg()).index(&FullRange))
+ &format!("{} are not allowed to have \
+ destructors", self.msg())[])
}
_ => {}
}
let msg = "constants cannot refer to other statics, \
insert an intermediate constant \
instead";
- self.tcx.sess.span_err(e.span, msg.index(&FullRange));
+ self.tcx.sess.span_err(e.span, &msg[]);
}
_ => {}
}
ast_map::NodeForeignItem(_) => {},
_ => {
self.sess.span_err(e.span,
- format!("expected item, found {}",
- self.ast_map.node_to_string(def_id.node)).index(&FullRange));
+ &format!("expected item, found {}",
+ self.ast_map.node_to_string(def_id.node))[]);
return;
},
}
// target uses". This _includes_ integer-constants, plus the following
// constructors:
//
-// fixed-size vectors and strings: .index(&FullRange) and ""/_
+// fixed-size vectors and strings: [] and ""/_
// vector and string slices: &[] and &""
// tuples: (,)
// enums: foo(...)
None => None,
Some(ast_map::NodeItem(it)) => match it.node {
ast::ItemEnum(ast::EnumDef { ref variants }, _) => {
- variant_expr(variants.index(&FullRange), variant_def.node)
+ variant_expr(&variants[], variant_def.node)
}
_ => None
},
// NOTE this doesn't do the right thing, it compares inlined
// NodeId's to the original variant_def's NodeId, but they
// come from different crates, so they will likely never match.
- variant_expr(variants.index(&FullRange), variant_def.node).map(|e| e.id)
+ variant_expr(&variants[], variant_def.node).map(|e| e.id)
}
_ => None
},
pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val {
match eval_const_expr_partial(tcx, e) {
Ok(r) => r,
- Err(s) => tcx.sess.span_fatal(e.span, s.index(&FullRange))
+ Err(s) => tcx.sess.span_fatal(e.span, &s[])
}
}
let mut t = on_entry.to_vec();
self.apply_gen_kill(cfgidx, t.as_mut_slice());
temp_bits = t;
- temp_bits.index(&FullRange)
+ &temp_bits[]
}
};
debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}",
let bits = self.kills.slice_mut(start, end);
debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]",
self.analysis_name, flow_exit, mut_bits_to_string(bits));
- bits.clone_from_slice(orig_kills.index(&FullRange));
+ bits.clone_from_slice(&orig_kills[]);
debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]",
self.analysis_name, flow_exit, mut_bits_to_string(bits));
}
let mut v = word;
for _ in range(0u, uint::BYTES) {
result.push(sep);
- result.push_str(format!("{:02x}", v & 0xFF).index(&FullRange));
+ result.push_str(&format!("{:02x}", v & 0xFF)[]);
v >>= 8;
sep = '-';
}
sess.cstore.iter_crate_data(|cnum, data| {
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
if src.rlib.is_some() { return }
- sess.err(format!("dependency `{}` not found in rlib format",
- data.name).index(&FullRange));
+ sess.err(&format!("dependency `{}` not found in rlib format",
+ data.name)[]);
});
return Vec::new();
}
Some(cstore::RequireDynamic) if src.dylib.is_some() => continue,
Some(kind) => {
let data = sess.cstore.get_crate_data(cnum + 1);
- sess.err(format!("crate `{}` required to be available in {}, \
+ sess.err(&format!("crate `{}` required to be available in {}, \
but it was not available in this form",
data.name,
match kind {
cstore::RequireStatic => "rlib",
cstore::RequireDynamic => "dylib",
- }).index(&FullRange));
+ })[]);
}
}
}
// can be refined over time.
if link2 != link || link == cstore::RequireStatic {
let data = sess.cstore.get_crate_data(cnum);
- sess.err(format!("cannot satisfy dependencies so `{}` only \
+ sess.err(&format!("cannot satisfy dependencies so `{}` only \
shows up once",
- data.name).index(&FullRange));
+ data.name)[]);
sess.help("having upstream crates all available in one format \
will likely make this go away");
}
let (m, r) = match self_ty.sty {
ty::ty_rptr(r, ref m) => (m.mutbl, r),
_ => self.tcx().sess.span_bug(expr.span,
- format!("bad overloaded deref type {}",
- method_ty.repr(self.tcx())).index(&FullRange))
+ &format!("bad overloaded deref type {}",
+ method_ty.repr(self.tcx()))[])
};
let bk = ty::BorrowKind::from_mutbl(m);
self.delegate.borrow(expr.id, expr.span, cmt,
let msg = format!("Pattern has unexpected def: {:?} and type {}",
def,
cmt_pat.ty.repr(tcx));
- tcx.sess.span_bug(pat.span, msg.index(&FullRange))
+ tcx.sess.span_bug(pat.span, &msg[])
}
}
}
for _ in a_regions.iter() {
invariance.push(ty::Invariant);
}
- invariance.index(&FullRange)
+ &invariance[]
}
};
(&ty::ty_infer(TyVar(_)), _) |
(_, &ty::ty_infer(TyVar(_))) => {
tcx.sess.bug(
- format!("{}: bot and var types should have been handled ({},{})",
+ &format!("{}: bot and var types should have been handled ({},{})",
this.tag(),
a.repr(this.infcx().tcx),
- b.repr(this.infcx().tcx)).index(&FullRange));
+ b.repr(this.infcx().tcx))[]);
}
(&ty::ty_err, _) | (_, &ty::ty_err) => {
ty::ReEarlyBound(..) => {
self.tcx().sess.span_bug(
self.span,
- format!("Encountered early bound region when generalizing: {}",
- r.repr(self.tcx())).index(&FullRange));
+ &format!("Encountered early bound region when generalizing: {}",
+ r.repr(self.tcx()))[]);
}
// Always make a fresh region variable for skolemized regions;
ref trace_origins,
ref same_regions) => {
if !same_regions.is_empty() {
- self.report_processed_errors(var_origins.index(&FullRange),
- trace_origins.index(&FullRange),
- same_regions.index(&FullRange));
+ self.report_processed_errors(&var_origins[],
+ &trace_origins[],
+ &same_regions[]);
}
}
}
self.tcx.sess.span_err(
trace.origin.span(),
- format!("{}: {} ({})",
+ &format!("{}: {} ({})",
message_root_str,
expected_found_str,
- ty::type_err_to_str(self.tcx, terr)).index(&FullRange));
+ ty::type_err_to_str(self.tcx, terr))[]);
match trace.origin {
infer::MatchExpressionArm(_, arm_span) =>
// Does the required lifetime have a nice name we can print?
self.tcx.sess.span_err(
origin.span(),
- format!("{} may not live long enough", labeled_user_string).index(&FullRange));
+ &format!("{} may not live long enough", labeled_user_string)[]);
self.tcx.sess.span_help(
origin.span(),
- format!(
+ &format!(
"consider adding an explicit lifetime bound `{}: {}`...",
bound_kind.user_string(self.tcx),
- sub.user_string(self.tcx)).index(&FullRange));
+ sub.user_string(self.tcx))[]);
}
ty::ReStatic => {
// Does the required lifetime have a nice name we can print?
self.tcx.sess.span_err(
origin.span(),
- format!("{} may not live long enough", labeled_user_string).index(&FullRange));
+ &format!("{} may not live long enough", labeled_user_string)[]);
self.tcx.sess.span_help(
origin.span(),
- format!(
+ &format!(
"consider adding an explicit lifetime bound `{}: 'static`...",
- bound_kind.user_string(self.tcx)).index(&FullRange));
+ bound_kind.user_string(self.tcx))[]);
}
_ => {
// If not, be less specific.
self.tcx.sess.span_err(
origin.span(),
- format!(
+ &format!(
"{} may not live long enough",
- labeled_user_string).index(&FullRange));
+ labeled_user_string)[]);
self.tcx.sess.span_help(
origin.span(),
- format!(
+ &format!(
"consider adding an explicit lifetime bound for `{}`",
- bound_kind.user_string(self.tcx)).index(&FullRange));
+ bound_kind.user_string(self.tcx))[]);
note_and_explain_region(
self.tcx,
- format!("{} must be valid for ", labeled_user_string).index(&FullRange),
+ &format!("{} must be valid for ", labeled_user_string)[],
sub,
"...");
}
infer::ReborrowUpvar(span, ref upvar_id) => {
self.tcx.sess.span_err(
span,
- format!("lifetime of borrowed pointer outlives \
+ &format!("lifetime of borrowed pointer outlives \
lifetime of captured variable `{}`...",
ty::local_var_name_str(self.tcx,
upvar_id.var_id)
.get()
- .to_string()).index(&FullRange));
+ .to_string())[]);
note_and_explain_region(
self.tcx,
"...the borrowed pointer is valid for ",
"...");
note_and_explain_region(
self.tcx,
- format!("...but `{}` is only valid for ",
+ &format!("...but `{}` is only valid for ",
ty::local_var_name_str(self.tcx,
upvar_id.var_id)
.get()
- .to_string()).index(&FullRange),
+ .to_string())[],
sup,
"");
}
infer::FreeVariable(span, id) => {
self.tcx.sess.span_err(
span,
- format!("captured variable `{}` does not \
+ &format!("captured variable `{}` does not \
outlive the enclosing closure",
ty::local_var_name_str(self.tcx,
id).get()
- .to_string()).index(&FullRange));
+ .to_string())[]);
note_and_explain_region(
self.tcx,
"captured variable is valid for ",
infer::RelateParamBound(span, ty) => {
self.tcx.sess.span_err(
span,
- format!("the type `{}` does not fulfill the \
+ &format!("the type `{}` does not fulfill the \
required lifetime",
- self.ty_to_string(ty)).index(&FullRange));
+ self.ty_to_string(ty))[]);
note_and_explain_region(self.tcx,
"type must outlive ",
sub,
infer::RelateDefaultParamBound(span, ty) => {
self.tcx.sess.span_err(
span,
- format!("the type `{}` (provided as the value of \
+ &format!("the type `{}` (provided as the value of \
a type parameter) is not valid at this point",
- self.ty_to_string(ty)).index(&FullRange));
+ self.ty_to_string(ty))[]);
note_and_explain_region(self.tcx,
"type must outlive ",
sub,
infer::ExprTypeIsNotInScope(t, span) => {
self.tcx.sess.span_err(
span,
- format!("type of expression contains references \
+ &format!("type of expression contains references \
that are not valid during the expression: `{}`",
- self.ty_to_string(t)).index(&FullRange));
+ self.ty_to_string(t))[]);
note_and_explain_region(
self.tcx,
"type is only valid for ",
infer::ReferenceOutlivesReferent(ty, span) => {
self.tcx.sess.span_err(
span,
- format!("in type `{}`, reference has a longer lifetime \
+ &format!("in type `{}`, reference has a longer lifetime \
than the data it references",
- self.ty_to_string(ty)).index(&FullRange));
+ self.ty_to_string(ty))[]);
note_and_explain_region(
self.tcx,
"the pointer is valid for ",
let (fn_decl, generics, unsafety, ident, expl_self, span)
= node_inner.expect("expect item fn");
let taken = lifetimes_in_scope(self.tcx, scope_id);
- let life_giver = LifeGiver::with_taken(taken.index(&FullRange));
+ let life_giver = LifeGiver::with_taken(&taken[]);
let rebuilder = Rebuilder::new(self.tcx, fn_decl, expl_self,
generics, same_regions, &life_giver);
let (fn_decl, expl_self, generics) = rebuilder.rebuild();
}
expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime,
&anon_nums, ®ion_names);
- inputs = self.rebuild_args_ty(inputs.index(&FullRange), lifetime,
+ inputs = self.rebuild_args_ty(&inputs[], lifetime,
&anon_nums, ®ion_names);
output = self.rebuild_output(&output, lifetime, &anon_nums, ®ion_names);
ty_params = self.rebuild_ty_params(ty_params, lifetime,
names.push(lt_name);
}
names.sort();
- let name = token::str_to_ident(names[0].index(&FullRange)).name;
+ let name = token::str_to_ident(&names[0][]).name;
return (name_to_dummy_lifetime(name), Kept);
}
return (self.life_giver.give_lifetime(), Fresh);
None => {
self.tcx
.sess
- .fatal(format!(
+ .fatal(&format!(
"unbound path {}",
- pprust::path_to_string(path)).index(&FullRange))
+ pprust::path_to_string(path))[])
}
Some(&d) => d
};
opt_explicit_self, generics);
let msg = format!("consider using an explicit lifetime \
parameter as shown: {}", suggested_fn);
- self.tcx.sess.span_help(span, msg.index(&FullRange));
+ self.tcx.sess.span_help(span, &msg[]);
}
fn report_inference_failure(&self,
self.tcx.sess.span_err(
var_origin.span(),
- format!("cannot infer an appropriate lifetime{} \
+ &format!("cannot infer an appropriate lifetime{} \
due to conflicting requirements",
- var_description).index(&FullRange));
+ var_description)[]);
}
fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) {
Some(values_str) => {
self.tcx.sess.span_note(
trace.origin.span(),
- format!("...so that {} ({})",
- desc, values_str).index(&FullRange));
+ &format!("...so that {} ({})",
+ desc, values_str)[]);
}
None => {
// Really should avoid printing this error at
// doing right now. - nmatsakis
self.tcx.sess.span_note(
trace.origin.span(),
- format!("...so that {}", desc).index(&FullRange));
+ &format!("...so that {}", desc)[]);
}
}
}
infer::ReborrowUpvar(span, ref upvar_id) => {
self.tcx.sess.span_note(
span,
- format!(
+ &format!(
"...so that closure can access `{}`",
ty::local_var_name_str(self.tcx, upvar_id.var_id)
.get()
- .to_string()).index(&FullRange))
+ .to_string())[])
}
infer::InfStackClosure(span) => {
self.tcx.sess.span_note(
infer::FreeVariable(span, id) => {
self.tcx.sess.span_note(
span,
- format!("...so that captured variable `{}` \
+ &format!("...so that captured variable `{}` \
does not outlive the enclosing closure",
ty::local_var_name_str(
self.tcx,
- id).get().to_string()).index(&FullRange));
+ id).get().to_string())[]);
}
infer::IndexSlice(span) => {
self.tcx.sess.span_note(
infer::ExprTypeIsNotInScope(t, span) => {
self.tcx.sess.span_note(
span,
- format!("...so type `{}` of expression is valid during the \
+ &format!("...so type `{}` of expression is valid during the \
expression",
- self.ty_to_string(t)).index(&FullRange));
+ self.ty_to_string(t))[]);
}
infer::BindingTypeIsNotValidAtDecl(span) => {
self.tcx.sess.span_note(
infer::ReferenceOutlivesReferent(ty, span) => {
self.tcx.sess.span_note(
span,
- format!("...so that the reference type `{}` \
+ &format!("...so that the reference type `{}` \
does not outlive the data it points at",
- self.ty_to_string(ty)).index(&FullRange));
+ self.ty_to_string(ty))[]);
}
infer::RelateParamBound(span, t) => {
self.tcx.sess.span_note(
span,
- format!("...so that the type `{}` \
+ &format!("...so that the type `{}` \
will meet the declared lifetime bounds",
- self.ty_to_string(t)).index(&FullRange));
+ self.ty_to_string(t))[]);
}
infer::RelateDefaultParamBound(span, t) => {
self.tcx.sess.span_note(
span,
- format!("...so that type parameter \
+ &format!("...so that type parameter \
instantiated with `{}`, \
will meet its declared lifetime bounds",
- self.ty_to_string(t)).index(&FullRange));
+ self.ty_to_string(t))[]);
}
infer::RelateRegionParamBound(span) => {
self.tcx.sess.span_note(
span,
- format!("...so that the declared lifetime parameter bounds \
- are satisfied").index(&FullRange));
+ &format!("...so that the declared lifetime parameter bounds \
+ are satisfied")[]);
}
}
}
Some(node) => match node {
ast_map::NodeItem(item) => match item.node {
ast::ItemFn(_, _, _, ref gen, _) => {
- taken.push_all(gen.lifetimes.index(&FullRange));
+ taken.push_all(&gen.lifetimes[]);
None
},
_ => None
ast_map::NodeImplItem(ii) => {
match *ii {
ast::MethodImplItem(ref m) => {
- taken.push_all(m.pe_generics().lifetimes.index(&FullRange));
+ taken.push_all(&m.pe_generics().lifetimes[]);
Some(m.id)
}
ast::TypeImplItem(_) => None,
let mut lifetime;
loop {
let mut s = String::from_str("'");
- s.push_str(num_to_string(self.counter.get()).index(&FullRange));
+ s.push_str(&num_to_string(self.counter.get())[]);
if !self.taken.contains(&s) {
lifetime = name_to_dummy_lifetime(
- token::str_to_ident(s.index(&FullRange)).name);
+ token::str_to_ident(&s[]).name);
self.generated.borrow_mut().push(lifetime);
break;
}
infcx.tcx.sess.span_bug(
span,
- format!("region {:?} is not associated with \
+ &format!("region {:?} is not associated with \
any bound region from A!",
- r0).index(&FullRange))
+ r0)[])
}
}
}
infcx.tcx.sess.span_bug(
span,
- format!("could not find original bound region for {:?}", r).index(&FullRange));
+ &format!("could not find original bound region for {:?}", r)[]);
}
fn fresh_bound_variable(infcx: &InferCtxt, debruijn: ty::DebruijnIndex) -> ty::Region {
r => {
combiner.infcx().tcx.sess.span_bug(
combiner.trace().origin.span(),
- format!("found non-region-vid: {:?}", r).index(&FullRange));
+ &format!("found non-region-vid: {:?}", r)[]);
}
}).collect()
}
format!(" ({})", ty::type_err_to_str(self.tcx, t_err))
});
- self.tcx.sess.span_err(sp, format!("{}{}",
+ self.tcx.sess.span_err(sp, &format!("{}{}",
mk_msg(resolved_expected.map(|t| self.ty_to_string(t)), actual_ty),
- error_str).index(&FullRange));
+ error_str)[]);
for err in err.iter() {
ty::note_and_explain_type_err(self.tcx, *err)
(_, ReLateBound(..)) => {
self.tcx.sess.span_bug(
origin.span(),
- format!("cannot relate bound region: {} <= {}",
+ &format!("cannot relate bound region: {} <= {}",
sub.repr(self.tcx),
- sup.repr(self.tcx)).index(&FullRange));
+ sup.repr(self.tcx))[]);
}
(_, ReStatic) => {
// all regions are subregions of static, so we can ignore this
(ReEarlyBound(..), _) |
(_, ReEarlyBound(..)) => {
self.tcx.sess.bug(
- format!("cannot relate bound region: LUB({}, {})",
+ &format!("cannot relate bound region: LUB({}, {})",
a.repr(self.tcx),
- b.repr(self.tcx)).index(&FullRange));
+ b.repr(self.tcx))[]);
}
(ReStatic, _) | (_, ReStatic) => {
(ReInfer(ReVar(v_id)), _) | (_, ReInfer(ReVar(v_id))) => {
self.tcx.sess.span_bug(
(*self.var_origins.borrow())[v_id.index as uint].span(),
- format!("lub_concrete_regions invoked with \
+ &format!("lub_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}",
a,
- b).index(&FullRange));
+ b)[]);
}
(ReFree(ref fr), ReScope(s_id)) |
(ReEarlyBound(..), _) |
(_, ReEarlyBound(..)) => {
self.tcx.sess.bug(
- format!("cannot relate bound region: GLB({}, {})",
+ &format!("cannot relate bound region: GLB({}, {})",
a.repr(self.tcx),
- b.repr(self.tcx)).index(&FullRange));
+ b.repr(self.tcx))[]);
}
(ReStatic, r) | (r, ReStatic) => {
(_, ReInfer(ReVar(v_id))) => {
self.tcx.sess.span_bug(
(*self.var_origins.borrow())[v_id.index as uint].span(),
- format!("glb_concrete_regions invoked with \
+ &format!("glb_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}",
a,
- b).index(&FullRange));
+ b)[]);
}
(ReFree(ref fr), ReScope(s_id)) |
self.expansion(var_data.as_mut_slice());
self.contraction(var_data.as_mut_slice());
let values =
- self.extract_values_and_collect_conflicts(var_data.index(&FullRange),
+ self.extract_values_and_collect_conflicts(&var_data[],
errors);
self.collect_concrete_region_errors(&values, errors);
values
self.tcx.sess.span_bug(
(*self.var_origins.borrow())[node_idx.index as uint].span(),
- format!("collect_error_for_expanding_node() could not find error \
+ &format!("collect_error_for_expanding_node() could not find error \
for var {:?}, lower_bounds={}, upper_bounds={}",
node_idx,
lower_bounds.repr(self.tcx),
- upper_bounds.repr(self.tcx)).index(&FullRange));
+ upper_bounds.repr(self.tcx))[]);
}
fn collect_error_for_contracting_node(
self.tcx.sess.span_bug(
(*self.var_origins.borrow())[node_idx.index as uint].span(),
- format!("collect_error_for_contracting_node() could not find error \
+ &format!("collect_error_for_contracting_node() could not find error \
for var {:?}, upper_bounds={}",
node_idx,
- upper_bounds.repr(self.tcx)).index(&FullRange));
+ upper_bounds.repr(self.tcx))[]);
}
fn collect_concrete_regions(&self,
}
ty::ty_infer(_) => {
self.infcx.tcx.sess.bug(
- format!("Unexpected type in full type resolver: {}",
- t.repr(self.infcx.tcx)).index(&FullRange));
+ &format!("Unexpected type in full type resolver: {}",
+ t.repr(self.infcx.tcx))[]);
}
_ => {
ty_fold::super_fold_ty(self, t)
None => {
self.tcx
.sess
- .span_bug(span, format!("no variable registered for id {}",
- node_id).index(&FullRange));
+ .span_bug(span, &format!("no variable registered for id {}",
+ node_id)[]);
}
}
}
// creating liveness nodes for.
self.ir.tcx.sess.span_bug(
span,
- format!("no live node registered for node {}",
- node_id).index(&FullRange));
+ &format!("no live node registered for node {}",
+ node_id)[]);
}
}
}
// Uninteresting cases: just propagate in rev exec order
ast::ExprVec(ref exprs) => {
- self.propagate_through_exprs(exprs.index(&FullRange), succ)
+ self.propagate_through_exprs(&exprs[], succ)
}
ast::ExprRepeat(ref element, ref count) => {
} else {
succ
};
- let succ = self.propagate_through_exprs(args.index(&FullRange), succ);
+ let succ = self.propagate_through_exprs(&args[], succ);
self.propagate_through_expr(&**f, succ)
}
} else {
succ
};
- self.propagate_through_exprs(args.index(&FullRange), succ)
+ self.propagate_through_exprs(&args[], succ)
}
ast::ExprTup(ref exprs) => {
- self.propagate_through_exprs(exprs.index(&FullRange), succ)
+ self.propagate_through_exprs(&exprs[], succ)
}
ast::ExprBinary(op, ref l, ref r) if ast_util::lazy_binop(op) => {
_ => {
self.tcx().sess.span_bug(
span,
- format!("Upvar of non-closure {} - {}",
+ &format!("Upvar of non-closure {} - {}",
fn_node_id,
- ty.repr(self.tcx())).index(&FullRange));
+ ty.repr(self.tcx()))[]);
}
}
}
match result {
None => true,
Some((span, msg, note)) => {
- self.tcx.sess.span_err(span, msg.index(&FullRange));
+ self.tcx.sess.span_err(span, &msg[]);
match note {
Some((span, msg)) => {
- self.tcx.sess.span_note(span, msg.index(&FullRange))
+ self.tcx.sess.span_note(span, &msg[])
}
None => {},
}
UnnamedField(idx) => format!("field #{} of {} is private",
idx + 1, struct_desc),
};
- self.tcx.sess.span_err(span, msg.index(&FullRange));
+ self.tcx.sess.span_err(span, &msg[]);
}
// Given the ID of a method, checks to ensure it's in scope.
self.report_error(self.ensure_public(span,
method_id,
None,
- format!("method `{}`",
- string).index(&FullRange)));
+ &format!("method `{}`",
+ string)[]));
}
// Checks that a path is in scope.
self.ensure_public(span,
def,
Some(origdid),
- format!("{} `{}`", tyname, name).index(&FullRange))
+ &format!("{} `{}`", tyname, name)[])
};
match self.last_private_map[path_id] {
// monomorphized or it was marked with `#[inline]`. This will only return
// true for functions.
fn item_might_be_inlined(item: &ast::Item) -> bool {
- if attributes_specify_inlining(item.attrs.index(&FullRange)) {
+ if attributes_specify_inlining(&item.attrs[]) {
return true
}
fn method_might_be_inlined(tcx: &ty::ctxt, method: &ast::Method,
impl_src: ast::DefId) -> bool {
- if attributes_specify_inlining(method.attrs.index(&FullRange)) ||
+ if attributes_specify_inlining(&method.attrs[]) ||
generics_require_inlining(method.pe_generics()) {
return true
}
ast::MethodImplItem(ref method) => {
if generics_require_inlining(method.pe_generics()) ||
attributes_specify_inlining(
- method.attrs.index(&FullRange)) {
+ &method.attrs[]) {
true
} else {
let impl_did = self.tcx
Some(ref item) => self.propagate_node(item, search_item),
None if search_item == ast::CRATE_NODE_ID => {}
None => {
- self.tcx.sess.bug(format!("found unmapped ID in worklist: \
+ self.tcx.sess.bug(&format!("found unmapped ID in worklist: \
{}",
- search_item).index(&FullRange))
+ search_item)[])
}
}
}
_ => {
self.tcx
.sess
- .bug(format!("found unexpected thingy in worklist: {}",
+ .bug(&format!("found unexpected thingy in worklist: {}",
self.tcx
.map
- .node_to_string(search_item)).index(&FullRange))
+ .node_to_string(search_item))[])
}
}
}
// A, but the inner rvalues `a()` and `b()` have an extended lifetime
// due to rule C.
//
- // FIXME(#6308) -- Note that `.index(&FullRange)` patterns work more smoothly post-DST.
+ // FIXME(#6308) -- Note that `[]` patterns work more smoothly post-DST.
match local.init {
Some(ref expr) => {
fn unresolved_lifetime_ref(&self, lifetime_ref: &ast::Lifetime) {
self.sess.span_err(
lifetime_ref.span,
- format!("use of undeclared lifetime name `{}`",
- token::get_name(lifetime_ref.name)).index(&FullRange));
+ &format!("use of undeclared lifetime name `{}`",
+ token::get_name(lifetime_ref.name))[]);
}
fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec<ast::LifetimeDef>) {
if special_idents.iter().any(|&i| i.name == lifetime.lifetime.name) {
self.sess.span_err(
lifetime.lifetime.span,
- format!("illegal lifetime parameter name: `{}`",
+ &format!("illegal lifetime parameter name: `{}`",
token::get_name(lifetime.lifetime.name))
- .index(&FullRange));
+ []);
}
}
if lifetime_i.lifetime.name == lifetime_j.lifetime.name {
self.sess.span_err(
lifetime_j.lifetime.span,
- format!("lifetime name `{}` declared twice in \
+ &format!("lifetime name `{}` declared twice in \
the same scope",
token::get_name(lifetime_j.lifetime.name))
- .index(&FullRange));
+ []);
}
}
let span = self.span.unwrap_or(DUMMY_SP);
self.tcx().sess.span_bug(
span,
- format!("Type parameter out of range \
+ &format!("Type parameter out of range \
when substituting in region {} (root type={}) \
(space={:?}, index={})",
region_name.as_str(),
self.root_ty.repr(self.tcx()),
- space, i).index(&FullRange));
+ space, i)[]);
}
}
}
let span = self.span.unwrap_or(DUMMY_SP);
self.tcx().sess.span_bug(
span,
- format!("Type parameter `{}` ({}/{:?}/{}) out of range \
+ &format!("Type parameter `{}` ({}/{:?}/{}) out of range \
when substituting (root type={}) substs={}",
p.repr(self.tcx()),
source_ty.repr(self.tcx()),
p.space,
p.idx,
self.root_ty.repr(self.tcx()),
- self.substs.repr(self.tcx())).index(&FullRange));
+ self.substs.repr(self.tcx()))[]);
}
};
ty::ty_open(..) |
ty::ty_err => {
tcx.sess.bug(
- format!("ty_is_local invoked on unexpected type: {}",
- ty.repr(tcx)).index(&FullRange))
+ &format!("ty_is_local invoked on unexpected type: {}",
+ ty.repr(tcx))[])
}
}
}
let suggested_limit = current_limit * 2;
tcx.sess.span_note(
span,
- format!(
+ &format!(
"consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
- suggested_limit).index(&FullRange));
+ suggested_limit)[]);
}
}
pub fn pending_obligations(&self) -> &[PredicateObligation<'tcx>] {
- self.predicates.index(&FullRange)
+ &self.predicates[]
}
/// Attempts to select obligations using `selcx`. If `only_new_obligations` is true, then it
// The `Self` type is erased, so it should not appear in list of
// arguments or return type apart from the receiver.
let ref sig = method.fty.sig;
- for &input_ty in sig.0.inputs.index(&(1..)).iter() {
+ for &input_ty in sig.0.inputs[1..].iter() {
if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) {
return Some(MethodViolationCode::ReferencesSelf);
}
let all_bounds =
util::transitive_bounds(
- self.tcx(), caller_trait_refs.index(&FullRange));
+ self.tcx(), &caller_trait_refs[]);
let matching_bounds =
all_bounds.filter(
ty::ty_infer(ty::FreshTy(_)) |
ty::ty_infer(ty::FreshIntTy(_)) => {
self.tcx().sess.bug(
- format!(
+ &format!(
"asked to assemble builtin bounds of unexpected type: {}",
- self_ty.repr(self.tcx())).index(&FullRange));
+ self_ty.repr(self.tcx()))[]);
}
};
AmbiguousBuiltin | ParameterBuiltin => {
self.tcx().sess.span_bug(
obligation.cause.span,
- format!("builtin bound for {} was ambig",
- obligation.repr(self.tcx())).index(&FullRange));
+ &format!("builtin bound for {} was ambig",
+ obligation.repr(self.tcx()))[]);
}
}
}
_ => {
self.tcx().sess.span_bug(
obligation.cause.span,
- format!("Fn pointer candidate for inappropriate self type: {}",
- self_ty.repr(self.tcx())).index(&FullRange));
+ &format!("Fn pointer candidate for inappropriate self type: {}",
+ self_ty.repr(self.tcx()))[]);
}
};
}
Err(()) => {
self.tcx().sess.bug(
- format!("Impl {} was matchable against {} but now is not",
+ &format!("Impl {} was matchable against {} but now is not",
impl_def_id.repr(self.tcx()),
- obligation.repr(self.tcx())).index(&FullRange));
+ obligation.repr(self.tcx()))[]);
}
}
}
/// struct Foo<T,U:Bar<T>> { ... }
///
/// Here, the `Generics` for `Foo` would contain a list of bounds like
-/// `[.index(&FullRange), [U:Bar<T>]]`. Now if there were some particular reference
-/// like `Foo<int,uint>`, then the `GenericBounds` would be `[.index(&FullRange),
+/// `[[], [U:Bar<T>]]`. Now if there were some particular reference
+/// like `Foo<int,uint>`, then the `GenericBounds` would be `[[],
/// [uint:Bar<int>]]`.
#[derive(Clone, Show)]
pub struct GenericBounds<'tcx> {
ParameterEnvironment::for_item(cx, cx.map.get_parent(id))
}
_ => {
- cx.sess.bug(format!("ParameterEnvironment::from_item(): \
+ cx.sess.bug(&format!("ParameterEnvironment::from_item(): \
`{}` is not an item",
- cx.map.node_to_string(id)).index(&FullRange))
+ cx.map.node_to_string(id))[])
}
}
}
};
match result {
Ok(trait_did) => trait_did,
- Err(err) => cx.sess.fatal(err.index(&FullRange)),
+ Err(err) => cx.sess.fatal(&err[]),
}
}
}
}
&ty_tup(ref ts) => {
- self.add_tys(ts.index(&FullRange));
+ self.add_tys(&ts[]);
}
&ty_bare_fn(_, ref f) => {
fn add_fn_sig(&mut self, fn_sig: &PolyFnSig) {
let mut computation = FlagComputation::new();
- computation.add_tys(fn_sig.0.inputs.index(&FullRange));
+ computation.add_tys(&fn_sig.0.inputs[]);
if let ty::FnConverging(output) = fn_sig.0.output {
computation.add_ty(output);
fn bound_list_is_sorted(bounds: &[ty::PolyProjectionPredicate]) -> bool {
bounds.len() == 0 ||
- bounds.index(&(1..)).iter().enumerate().all(
+ bounds[1..].iter().enumerate().all(
|(index, bound)| bounds[index].sort_key() <= bound.sort_key())
}
ty_vec(ty, _) => ty,
ty_str => mk_mach_uint(cx, ast::TyU8),
ty_open(ty) => sequence_element_type(cx, ty),
- _ => cx.sess.bug(format!("sequence_element_type called on non-sequence value: {}",
- ty_to_string(cx, ty)).index(&FullRange)),
+ _ => cx.sess.bug(&format!("sequence_element_type called on non-sequence value: {}",
+ ty_to_string(cx, ty))[]),
}
}
ty_struct(did, substs) => {
let flds = struct_fields(cx, did, substs);
let mut res =
- TypeContents::union(flds.index(&FullRange),
+ TypeContents::union(&flds[],
|f| tc_mt(cx, f.mt, cache));
if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) {
}
ty_tup(ref tys) => {
- TypeContents::union(tys.index(&FullRange),
+ TypeContents::union(&tys[],
|ty| tc_ty(cx, *ty, cache))
}
ty_enum(did, substs) => {
let variants = substd_enum_variants(cx, did, substs);
let mut res =
- TypeContents::union(variants.index(&FullRange), |variant| {
- TypeContents::union(variant.args.index(&FullRange),
+ TypeContents::union(&variants[], |variant| {
+ TypeContents::union(&variant.args[],
|arg_ty| {
tc_ty(cx, *arg_ty, cache)
})
pub fn close_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
match ty.sty {
ty_open(ty) => mk_rptr(cx, cx.mk_region(ReStatic), mt {ty: ty, mutbl:ast::MutImmutable}),
- _ => cx.sess.bug(format!("Trying to close a non-open type {}",
- ty_to_string(cx, ty)).index(&FullRange))
+ _ => cx.sess.bug(&format!("Trying to close a non-open type {}",
+ ty_to_string(cx, ty))[])
}
}
match cx.trait_refs.borrow().get(&id) {
Some(ty) => ty.clone(),
None => cx.sess.bug(
- format!("node_id_to_trait_ref: no trait ref for node `{}`",
- cx.map.node_to_string(id)).index(&FullRange))
+ &format!("node_id_to_trait_ref: no trait ref for node `{}`",
+ cx.map.node_to_string(id))[])
}
}
match try_node_id_to_type(cx, id) {
Some(ty) => ty,
None => cx.sess.bug(
- format!("node_id_to_type: no type for node `{}`",
- cx.map.node_to_string(id)).index(&FullRange))
+ &format!("node_id_to_type: no type for node `{}`",
+ cx.map.node_to_string(id))[])
}
}
ref s => {
tcx.sess.span_bug(
span,
- format!("ty_region() invoked on an inappropriate ty: {:?}",
- s).index(&FullRange));
+ &format!("ty_region() invoked on an inappropriate ty: {:?}",
+ s)[]);
}
}
}
e.span
}
Some(f) => {
- cx.sess.bug(format!("Node id {} is not an expr: {:?}",
+ cx.sess.bug(&format!("Node id {} is not an expr: {:?}",
id,
- f).index(&FullRange));
+ f)[]);
}
None => {
- cx.sess.bug(format!("Node id {} is not present \
- in the node map", id).index(&FullRange));
+ cx.sess.bug(&format!("Node id {} is not present \
+ in the node map", id)[]);
}
}
}
}
_ => {
cx.sess.bug(
- format!("Variable id {} maps to {:?}, not local",
+ &format!("Variable id {} maps to {:?}, not local",
id,
- pat).index(&FullRange));
+ pat)[]);
}
}
}
r => {
- cx.sess.bug(format!("Variable id {} maps to {:?}, not local",
+ cx.sess.bug(&format!("Variable id {} maps to {:?}, not local",
id,
- r).index(&FullRange));
+ r)[]);
}
}
}
}
ref b => {
cx.sess.bug(
- format!("AdjustReifyFnPointer adjustment on non-fn-item: \
+ &format!("AdjustReifyFnPointer adjustment on non-fn-item: \
{:?}",
- b).index(&FullRange));
+ b)[]);
}
}
}
None => {
cx.sess.span_bug(
span,
- format!("the {}th autoderef failed: \
+ &format!("the {}th autoderef failed: \
{}",
i,
ty_to_string(cx, adjusted_ty))
- .index(&FullRange));
+ []);
}
}
}
mk_vec(cx, ty, None)
}
_ => cx.sess.span_bug(span,
- format!("UnsizeLength with bad sty: {:?}",
- ty_to_string(cx, ty)).index(&FullRange))
+ &format!("UnsizeLength with bad sty: {:?}",
+ ty_to_string(cx, ty))[])
},
&UnsizeStruct(box ref k, tp_index) => match ty.sty {
ty_struct(did, substs) => {
mk_struct(cx, did, cx.mk_substs(unsized_substs))
}
_ => cx.sess.span_bug(span,
- format!("UnsizeStruct with bad sty: {:?}",
- ty_to_string(cx, ty)).index(&FullRange))
+ &format!("UnsizeStruct with bad sty: {:?}",
+ ty_to_string(cx, ty))[])
},
&UnsizeVtable(TyTrait { ref principal, ref bounds }, _) => {
mk_trait(cx, principal.clone(), bounds.clone())
match tcx.def_map.borrow().get(&expr.id) {
Some(&def) => def,
None => {
- tcx.sess.span_bug(expr.span, format!(
- "no def-map entry for expr {}", expr.id).index(&FullRange));
+ tcx.sess.span_bug(expr.span, &format!(
+ "no def-map entry for expr {}", expr.id)[]);
}
}
}
def => {
tcx.sess.span_bug(
expr.span,
- format!("uncategorized def for expr {}: {:?}",
+ &format!("uncategorized def for expr {}: {:?}",
expr.id,
- def).index(&FullRange));
+ def)[]);
}
}
}
-> uint {
let mut i = 0u;
for f in fields.iter() { if f.name == name { return i; } i += 1u; }
- tcx.sess.bug(format!(
+ tcx.sess.bug(&format!(
"no field named `{}` found in the list of fields `{:?}`",
token::get_name(name),
fields.iter()
.map(|f| token::get_name(f.name).get().to_string())
- .collect::<Vec<String>>()).index(&FullRange));
+ .collect::<Vec<String>>())[]);
}
pub fn impl_or_trait_item_idx(id: ast::Name, trait_items: &[ImplOrTraitItem])
match item.node {
ItemTrait(_, _, _, ref ms) => {
let (_, p) =
- ast_util::split_trait_methods(ms.index(&FullRange));
+ ast_util::split_trait_methods(&ms[]);
p.iter()
.map(|m| {
match impl_or_trait_item(
}).collect()
}
_ => {
- cx.sess.bug(format!("provided_trait_methods: `{:?}` is \
+ cx.sess.bug(&format!("provided_trait_methods: `{:?}` is \
not a trait",
- id).index(&FullRange))
+ id)[])
}
}
}
_ => {
- cx.sess.bug(format!("provided_trait_methods: `{:?}` is not a \
+ cx.sess.bug(&format!("provided_trait_methods: `{:?}` is not a \
trait",
- id).index(&FullRange))
+ id)[])
}
}
} else {
};
},
ast::StructVariantKind(ref struct_def) => {
- let fields: &[StructField] = struct_def.fields.index(&FullRange);
+ let fields: &[StructField] = &struct_def.fields[];
assert!(fields.len() > 0);
Err(ref err) => {
cx.sess
.span_err(e.span,
- format!("expected constant: {}",
- *err).index(&FullRange));
+ &format!("expected constant: {}",
+ *err)[]);
}
},
None => {}
Some(fields) => (**fields).clone(),
_ => {
cx.sess.bug(
- format!("ID not mapped to struct fields: {}",
- cx.map.node_to_string(did.node)).index(&FullRange));
+ &format!("ID not mapped to struct fields: {}",
+ cx.map.node_to_string(did.node))[]);
}
}
} else {
pub fn tup_fields<'tcx>(v: &[Ty<'tcx>]) -> Vec<field<'tcx>> {
v.iter().enumerate().map(|(i, &f)| {
field {
- name: token::intern(i.to_string().index(&FullRange)),
+ name: token::intern(&i.to_string()[]),
mt: mt {
ty: f,
mutbl: MutImmutable
const_eval::const_binary(_) =>
"binary array"
};
- tcx.sess.span_err(count_expr.span, format!(
+ tcx.sess.span_err(count_expr.span, &format!(
"expected positive integer for repeat count, found {}",
- found).index(&FullRange));
+ found)[]);
}
Err(_) => {
let found = match count_expr.node {
_ =>
"non-constant expression"
};
- tcx.sess.span_err(count_expr.span, format!(
+ tcx.sess.span_err(count_expr.span, &format!(
"expected constant integer for repeat count, found {}",
- found).index(&FullRange));
+ found)[]);
}
}
0
{
match tcx.freevars.borrow().get(&fid) {
None => f(&[]),
- Some(d) => f(d.index(&FullRange))
+ Some(d) => f(&d[])
}
}
// this is fatal: there are almost certainly macros we need
// inside this crate, so continue would spew "macro undefined"
// errors
- Err(err) => self.sess.span_fatal(vi.span, err.index(&FullRange))
+ Err(err) => self.sess.span_fatal(vi.span, &err[])
};
unsafe {
let registrar =
- match lib.symbol(symbol.index(&FullRange)) {
+ match lib.symbol(&symbol[]) {
Ok(registrar) => {
mem::transmute::<*mut u8,PluginRegistrarFun>(registrar)
}
// again fatal if we can't register macros
- Err(err) => self.sess.span_fatal(vi.span, err.index(&FullRange))
+ Err(err) => self.sess.span_fatal(vi.span, &err[])
};
// Intentionally leak the dynamic library. We can't ever unload it
if !setter(&mut cg, value) {
match (value, opt_type_desc) {
(Some(..), None) => {
- early_error(format!("codegen option `{}` takes no \
- value", key).index(&FullRange))
+ early_error(&format!("codegen option `{}` takes no \
+ value", key)[])
}
(None, Some(type_desc)) => {
- early_error(format!("codegen option `{0}` requires \
+ early_error(&format!("codegen option `{0}` requires \
{1} (-C {0}=<value>)",
- key, type_desc).index(&FullRange))
+ key, type_desc)[])
}
(Some(value), Some(type_desc)) => {
- early_error(format!("incorrect value `{}` for codegen \
+ early_error(&format!("incorrect value `{}` for codegen \
option `{}` - {} was expected",
- value, key, type_desc).index(&FullRange))
+ value, key, type_desc)[])
}
(None, None) => unreachable!()
}
break;
}
if !found {
- early_error(format!("unknown codegen option: `{}`",
- key).index(&FullRange));
+ early_error(&format!("unknown codegen option: `{}`",
+ key)[]);
}
}
return cg;
pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
use syntax::parse::token::intern_and_get_ident as intern;
- let end = sess.target.target.target_endian.index(&FullRange);
- let arch = sess.target.target.arch.index(&FullRange);
- let wordsz = sess.target.target.target_word_size.index(&FullRange);
- let os = sess.target.target.target_os.index(&FullRange);
+ let end = &sess.target.target.target_endian[];
+ let arch = &sess.target.target.arch[];
+ let wordsz = &sess.target.target.target_word_size[];
+ let os = &sess.target.target.target_os[];
let fam = match sess.target.target.options.is_like_windows {
true => InternedString::new("windows"),
append_configuration(&mut user_cfg, InternedString::new("test"))
}
let mut v = user_cfg.into_iter().collect::<Vec<_>>();
- v.push_all(default_cfg.index(&FullRange));
+ v.push_all(&default_cfg[]);
v
}
pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config {
- let target = match Target::search(opts.target_triple.index(&FullRange)) {
+ let target = match Target::search(&opts.target_triple[]) {
Ok(t) => t,
Err(e) => {
sp.handler().fatal((format!("Error loading target specification: {}", e)).as_slice());
}
};
- let (int_type, uint_type) = match target.target_word_size.index(&FullRange) {
+ let (int_type, uint_type) = match &target.target_word_size[] {
"32" => (ast::TyI32, ast::TyU32),
"64" => (ast::TyI64, ast::TyU64),
- w => sp.handler().fatal((format!("target specification was invalid: unrecognized \
- target-word-size {}", w)).index(&FullRange))
+ w => sp.handler().fatal(&format!("target specification was invalid: unrecognized \
+ target-word-size {}", w)[])
};
Config {
let unparsed_crate_types = matches.opt_strs("crate-type");
let crate_types = parse_crate_types_from_list(unparsed_crate_types)
- .unwrap_or_else(|e| early_error(e.index(&FullRange)));
+ .unwrap_or_else(|e| early_error(&e[]));
let mut lint_opts = vec!();
let mut describe_lints = false;
}
}
if this_bit == 0 {
- early_error(format!("unknown debug flag: {}",
- *debug_flag).index(&FullRange))
+ early_error(&format!("unknown debug flag: {}",
+ *debug_flag)[])
}
debugging_opts |= this_bit;
}
"link" => OutputTypeExe,
"dep-info" => OutputTypeDepInfo,
_ => {
- early_error(format!("unknown emission type: `{}`",
- part).index(&FullRange))
+ early_error(&format!("unknown emission type: `{}`",
+ part)[])
}
};
output_types.push(output_type)
Some("2") => Default,
Some("3") => Aggressive,
Some(arg) => {
- early_error(format!("optimization level needs to be \
+ early_error(&format!("optimization level needs to be \
between 0-3 (instead was `{}`)",
- arg).index(&FullRange));
+ arg)[]);
}
}
} else {
None |
Some("2") => FullDebugInfo,
Some(arg) => {
- early_error(format!("debug info level needs to be between \
+ early_error(&format!("debug info level needs to be between \
0-2 (instead was `{}`)",
- arg).index(&FullRange));
+ arg)[]);
}
}
} else {
let mut search_paths = SearchPaths::new();
for s in matches.opt_strs("L").iter() {
- search_paths.add_path(s.index(&FullRange));
+ search_paths.add_path(&s[]);
}
let libs = matches.opt_strs("l").into_iter().map(|s| {
(Some(name), "framework") => (name, cstore::NativeFramework),
(Some(name), "static") => (name, cstore::NativeStatic),
(_, s) => {
- early_error(format!("unknown library kind `{}`, expected \
+ early_error(&format!("unknown library kind `{}`, expected \
one of dylib, framework, or static",
- s).index(&FullRange));
+ s)[]);
}
};
(name.to_string(), kind)
--debuginfo");
}
- let color = match matches.opt_str("color").as_ref().map(|s| s.index(&FullRange)) {
+ let color = match matches.opt_str("color").as_ref().map(|s| &s[]) {
Some("auto") => Auto,
Some("always") => Always,
Some("never") => Never,
None => Auto,
Some(arg) => {
- early_error(format!("argument for --color must be auto, always \
+ early_error(&format!("argument for --color must be auto, always \
or never (instead was `{}`)",
- arg).index(&FullRange))
+ arg)[])
}
};
#[test]
fn test_switch_implies_cfg_test() {
let matches =
- &match getopts(&["--test".to_string()], optgroups().index(&FullRange)) {
+ &match getopts(&["--test".to_string()], &optgroups()[]) {
Ok(m) => m,
Err(f) => panic!("test_switch_implies_cfg_test: {}", f)
};
let sessopts = build_session_options(matches);
let sess = build_session(sessopts, None, registry);
let cfg = build_configuration(&sess);
- assert!((attr::contains_name(cfg.index(&FullRange), "test")));
+ assert!((attr::contains_name(&cfg[], "test")));
}
// When the user supplies --test and --cfg test, don't implicitly add
fn test_switch_implies_cfg_test_unless_cfg_test() {
let matches =
&match getopts(&["--test".to_string(), "--cfg=test".to_string()],
- optgroups().index(&FullRange)) {
+ &optgroups()[]) {
Ok(m) => m,
Err(f) => {
panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f)
{
let matches = getopts(&[
"-Awarnings".to_string()
- ], optgroups().index(&FullRange)).unwrap();
+ ], &optgroups()[]).unwrap();
let registry = diagnostics::registry::Registry::new(&[]);
let sessopts = build_session_options(&matches);
let sess = build_session(sessopts, None, registry);
let matches = getopts(&[
"-Awarnings".to_string(),
"-Dwarnings".to_string()
- ], optgroups().index(&FullRange)).unwrap();
+ ], &optgroups()[]).unwrap();
let registry = diagnostics::registry::Registry::new(&[]);
let sessopts = build_session_options(&matches);
let sess = build_session(sessopts, None, registry);
{
let matches = getopts(&[
"-Adead_code".to_string()
- ], optgroups().index(&FullRange)).unwrap();
+ ], &optgroups()[]).unwrap();
let registry = diagnostics::registry::Registry::new(&[]);
let sessopts = build_session_options(&matches);
let sess = build_session(sessopts, None, registry);
// cases later on
pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp,
- format!("impossible case reached: {}", msg).index(&FullRange));
+ &format!("impossible case reached: {}", msg)[]);
}
pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) }
pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) }
}
pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch {
filesearch::FileSearch::new(self.sysroot(),
- self.opts.target_triple.index(&FullRange),
+ &self.opts.target_triple[],
&self.opts.search_paths,
kind)
}
for c in range(0u32, MAX as u32)
.filter_map(|i| from_u32(i))
.map(|i| i.to_string()) {
- assert_eq!(lev_distance(c.index(&FullRange), c.index(&FullRange)), 0);
+ assert_eq!(lev_distance(&c[], &c[]), 0);
}
let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
(ref str, Some(span)) => {
cx.sess.span_note(
span,
- format!("{}{}{}", prefix, *str, suffix).index(&FullRange));
+ &format!("{}{}{}", prefix, *str, suffix)[]);
Some(span)
}
(ref str, None) => {
cx.sess.note(
- format!("{}{}{}", prefix, *str, suffix).index(&FullRange));
+ &format!("{}{}{}", prefix, *str, suffix)[]);
None
}
}
};
if abi != abi::Rust {
- s.push_str(format!("extern {} ", abi.to_string()).index(&FullRange));
+ s.push_str(&format!("extern {} ", abi.to_string())[]);
};
s.push_str("fn");
Some(def_id) => {
s.push_str(" {");
let path_str = ty::item_path_str(cx, def_id);
- s.push_str(path_str.index(&FullRange));
+ s.push_str(&path_str[]);
s.push_str("}");
}
None => { }
match cty.store {
ty::UniqTraitStore => {}
ty::RegionTraitStore(region, _) => {
- s.push_str(region_to_string(cx, "", true, region).index(&FullRange));
+ s.push_str(&region_to_string(cx, "", true, region)[]);
}
}
assert_eq!(cty.onceness, ast::Once);
s.push_str("proc");
push_sig_to_string(cx, &mut s, '(', ')', &cty.sig,
- bounds_str.index(&FullRange));
+ &bounds_str[]);
}
ty::RegionTraitStore(..) => {
match cty.onceness {
ast::Once => s.push_str("once ")
}
push_sig_to_string(cx, &mut s, '|', '|', &cty.sig,
- bounds_str.index(&FullRange));
+ &bounds_str[]);
}
}
ty::FnConverging(t) => {
if !ty::type_is_nil(t) {
s.push_str(" -> ");
- s.push_str(ty_to_string(cx, t).index(&FullRange));
+ s.push_str(&ty_to_string(cx, t)[]);
}
}
ty::FnDiverging => {
}
ty_rptr(r, ref tm) => {
let mut buf = region_ptr_to_string(cx, *r);
- buf.push_str(mt_to_string(cx, tm).index(&FullRange));
+ buf.push_str(&mt_to_string(cx, tm)[]);
buf
}
ty_open(typ) =>
.iter()
.map(|elem| ty_to_string(cx, *elem))
.collect::<Vec<_>>();
- match strs.index(&FullRange) {
+ match &strs[] {
[ref string] => format!("({},)", string),
strs => format!("({})", strs.connect(", "))
}
0
};
- for t in tps.index(&(0..(tps.len() - num_defaults))).iter() {
+ for t in tps[0..(tps.len() - num_defaults)].iter() {
strs.push(ty_to_string(cx, *t))
}
format!("{}({}){}",
base,
if strs[0].starts_with("(") && strs[0].ends_with(",)") {
- strs[0].index(&(1 .. (strs[0].len() - 2))) // Remove '(' and ',)'
+ &strs[0][1 .. (strs[0].len() - 2)] // Remove '(' and ',)'
} else if strs[0].starts_with("(") && strs[0].ends_with(")") {
- strs[0].index(&(1 .. (strs[0].len() - 1))) // Remove '(' and ')'
+ &strs[0][1 .. (strs[0].len() - 1)] // Remove '(' and ')'
} else {
- strs[0].index(&FullRange)
+ &strs[0][]
},
if &*strs[1] == "()" { String::new() } else { format!(" -> {}", strs[1]) })
} else if strs.len() > 0 {
pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String {
let mut s = typ.repr(cx).to_string();
if s.len() >= 32u {
- s = s.index(&(0u..32u)).to_string();
+ s = (&s[0u..32u]).to_string();
}
return s;
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
- repr_vec(tcx, self.index(&FullRange))
+ repr_vec(tcx, &self[])
}
}
// autoderef cannot convert the &[T] handler
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
- repr_vec(tcx, self.index(&FullRange))
+ repr_vec(tcx, &self[])
}
}
pub fn actions_since_snapshot(&self,
snapshot: &Snapshot)
-> &[UndoLog<T,U>] {
- self.undo_log.index(&(snapshot.length..))
+ &self.undo_log[snapshot.length..]
}
fn assert_open_snapshot(&self, snapshot: &Snapshot) {
args: &str, cwd: Option<&Path>,
paths: &[&Path]) -> ProcessOutput {
let ar = match *maybe_ar_prog {
- Some(ref ar) => ar.index(&FullRange),
+ Some(ref ar) => &ar[],
None => "ar"
};
let mut cmd = Command::new(ar);
Ok(prog) => {
let o = prog.wait_with_output().unwrap();
if !o.status.success() {
- handler.err(format!("{} failed with: {}",
+ handler.err(&format!("{} failed with: {}",
cmd,
- o.status).index(&FullRange));
- handler.note(format!("stdout ---\n{}",
- str::from_utf8(o.output
- .index(&FullRange)).unwrap())
- .index(&FullRange));
- handler.note(format!("stderr ---\n{}",
- str::from_utf8(o.error
- .index(&FullRange)).unwrap())
- .index(&FullRange));
+ o.status)[]);
+ handler.note(&format!("stdout ---\n{}",
+ str::from_utf8(&o.output[]).unwrap())[]);
+ handler.note(&format!("stderr ---\n{}",
+ str::from_utf8(&o.error[]).unwrap())
+ []);
handler.abort_if_errors();
}
o
},
Err(e) => {
- handler.err(format!("could not exec `{}`: {}", ar.index(&FullRange),
- e).index(&FullRange));
+ handler.err(&format!("could not exec `{}`: {}", &ar[],
+ e)[]);
handler.abort_if_errors();
panic!("rustc::back::archive::run_ar() should not reach this point");
}
for path in search_paths.iter() {
debug!("looking for {} inside {:?}", name, path.display());
- let test = path.join(oslibname.index(&FullRange));
+ let test = path.join(&oslibname[]);
if test.exists() { return test }
if oslibname != unixlibname {
- let test = path.join(unixlibname.index(&FullRange));
+ let test = path.join(&unixlibname[]);
if test.exists() { return test }
}
}
- handler.fatal(format!("could not find native static library `{}`, \
+ handler.fatal(&format!("could not find native static library `{}`, \
perhaps an -L flag is missing?",
- name).index(&FullRange));
+ name)[]);
}
impl<'a> Archive<'a> {
/// Lists all files in an archive
pub fn files(&self) -> Vec<String> {
let output = run_ar(self.handler, &self.maybe_ar_prog, "t", None, &[&self.dst]);
- let output = str::from_utf8(output.output.index(&FullRange)).unwrap();
+ let output = str::from_utf8(&output.output[]).unwrap();
// use lines_any because windows delimits output with `\r\n` instead of
// just `\n`
output.lines_any().map(|s| s.to_string()).collect()
/// search in the relevant locations for a library named `name`.
pub fn add_native_library(&mut self, name: &str) -> io::IoResult<()> {
let location = find_library(name,
- self.archive.slib_prefix.index(&FullRange),
- self.archive.slib_suffix.index(&FullRange),
- self.archive.lib_search_paths.index(&FullRange),
+ &self.archive.slib_prefix[],
+ &self.archive.slib_suffix[],
+ &self.archive.lib_search_paths[],
self.archive.handler);
self.add_archive(&location, name, |_| false)
}
// as simple comparison is not enough - there
// might be also an extra name suffix
let obj_start = format!("{}", name);
- let obj_start = obj_start.index(&FullRange);
+ let obj_start = &obj_start[];
// Ignoring all bytecode files, no matter of
// name
let bc_ext = ".bytecode.deflate";
- self.add_archive(rlib, name.index(&FullRange), |fname: &str| {
+ self.add_archive(rlib, &name[], |fname: &str| {
let skip_obj = lto && fname.starts_with(obj_start)
&& fname.ends_with(".o");
skip_obj || fname.ends_with(bc_ext) || fname == METADATA_FILENAME
// allow running `ar s file.a` to update symbols only.
if self.should_update_symbols {
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
- "s", Some(self.work_dir.path()), args.index(&FullRange));
+ "s", Some(self.work_dir.path()), &args[]);
}
return self.archive;
}
// Add the archive members seen so far, without updating the
// symbol table (`S`).
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
- "cruS", Some(self.work_dir.path()), args.index(&FullRange));
+ "cruS", Some(self.work_dir.path()), &args[]);
args.clear();
args.push(&abs_dst);
// necessary.
let flags = if self.should_update_symbols { "crus" } else { "cruS" };
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
- flags, Some(self.work_dir.path()), args.index(&FullRange));
+ flags, Some(self.work_dir.path()), &args[]);
self.archive
}
} else {
filename
};
- let new_filename = self.work_dir.path().join(filename.index(&FullRange));
+ let new_filename = self.work_dir.path().join(&filename[]);
try!(fs::rename(file, &new_filename));
self.members.push(Path::new(filename));
}
l.map(|p| p.clone())
}).collect::<Vec<_>>();
- let rpaths = get_rpaths(config, libs.index(&FullRange));
- flags.push_all(rpaths_to_flags(rpaths.index(&FullRange)).index(&FullRange));
+ let rpaths = get_rpaths(config, &libs[]);
+ flags.push_all(&rpaths_to_flags(&rpaths[])[]);
flags
}
fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> {
let mut ret = Vec::new();
for rpath in rpaths.iter() {
- ret.push(format!("-Wl,-rpath,{}", (*rpath).index(&FullRange)));
+ ret.push(format!("-Wl,-rpath,{}", &(*rpath)[]));
}
return ret;
}
}
}
- log_rpaths("relative", rel_rpaths.index(&FullRange));
- log_rpaths("fallback", fallback_rpaths.index(&FullRange));
+ log_rpaths("relative", &rel_rpaths[]);
+ log_rpaths("fallback", &fallback_rpaths[]);
let mut rpaths = rel_rpaths;
- rpaths.push_all(fallback_rpaths.index(&FullRange));
+ rpaths.push_all(&fallback_rpaths[]);
// Remove duplicates
- let rpaths = minimize_rpaths(rpaths.index(&FullRange));
+ let rpaths = minimize_rpaths(&rpaths[]);
return rpaths;
}
let mut set = HashSet::new();
let mut minimized = Vec::new();
for rpath in rpaths.iter() {
- if set.insert(rpath.index(&FullRange)) {
+ if set.insert(&rpath[]) {
minimized.push(rpath.clone());
}
}
if input.len() >= buffer_remaining {
copy_memory(
self.buffer.slice_mut(self.buffer_idx, size),
- input.index(&(0..buffer_remaining)));
+ &input[0..buffer_remaining]);
self.buffer_idx = 0;
func(&self.buffer);
i += buffer_remaining;
// While we have at least a full buffer size chunk's worth of data, process that data
// without copying it into the buffer
while input.len() - i >= size {
- func(input.index(&(i..(i + size))));
+ func(&input[i..(i + size)]);
i += size;
}
let input_remaining = input.len() - i;
copy_memory(
self.buffer.slice_to_mut(input_remaining),
- input.index(&(i..)));
+ &input[i..]);
self.buffer_idx += input_remaining;
}
fn full_buffer<'s>(&'s mut self) -> &'s [u8] {
assert!(self.buffer_idx == 64);
self.buffer_idx = 0;
- return self.buffer.index(&(0..64));
+ return &self.buffer[0..64];
}
fn position(&self) -> uint { self.buffer_idx }
}
pub fn as_str<'a>(&'a self) -> &'a str {
- self.hash.index(&FullRange)
+ &self.hash[]
}
pub fn calculate(metadata: &Vec<String>, krate: &ast::Crate) -> Svh {
fn macro_name(mac: &Mac) -> token::InternedString {
match &mac.node {
&MacInvocTT(ref path, ref _tts, ref _stx_ctxt) => {
- let s = path.segments.index(&FullRange);
+ let s = &path.segments[];
assert_eq!(s.len(), 1);
content(s[0].identifier)
}
.and_then(|os| os.map(|s| s.to_string())) {
Some(val) => val,
None =>
- handler.fatal((format!("Field {} in target specification is required", name))
- .index(&FullRange))
+ handler.fatal(&format!("Field {} in target specification is required", name)[])
}
};
macro_rules! key {
($key_name:ident) => ( {
let name = (stringify!($key_name)).replace("_", "-");
- obj.find(name.index(&FullRange)).map(|o| o.as_string()
+ obj.find(&name[]).map(|o| o.as_string()
.map(|s| base.options.$key_name = s.to_string()));
} );
($key_name:ident, bool) => ( {
let name = (stringify!($key_name)).replace("_", "-");
- obj.find(name.index(&FullRange))
+ obj.find(&name[])
.map(|o| o.as_boolean()
.map(|s| base.options.$key_name = s));
} );
($key_name:ident, list) => ( {
let name = (stringify!($key_name)).replace("_", "-");
- obj.find(name.index(&FullRange)).map(|o| o.as_array()
+ obj.find(&name[]).map(|o| o.as_array()
.map(|v| base.options.$key_name = v.iter()
.map(|a| a.as_string().unwrap().to_string()).collect()
)
let target_path = os::getenv("RUST_TARGET_PATH").unwrap_or(String::new());
- let paths = os::split_paths(target_path.index(&FullRange));
+ let paths = os::split_paths(&target_path[]);
// FIXME 16351: add a sane default search path?
for dir in paths.iter() {
(ty::MutBorrow, ty::MutBorrow) => {
self.bccx.span_err(
new_loan.span,
- format!("cannot borrow `{}`{} as mutable \
+ &format!("cannot borrow `{}`{} as mutable \
more than once at a time",
- nl, new_loan_msg).index(&FullRange))
+ nl, new_loan_msg)[])
}
(ty::UniqueImmBorrow, _) => {
self.bccx.span_err(
new_loan.span,
- format!("closure requires unique access to `{}` \
+ &format!("closure requires unique access to `{}` \
but {} is already borrowed{}",
- nl, ol_pronoun, old_loan_msg).index(&FullRange));
+ nl, ol_pronoun, old_loan_msg)[]);
}
(_, ty::UniqueImmBorrow) => {
self.bccx.span_err(
new_loan.span,
- format!("cannot borrow `{}`{} as {} because \
+ &format!("cannot borrow `{}`{} as {} because \
previous closure requires unique access",
- nl, new_loan_msg, new_loan.kind.to_user_str()).index(&FullRange));
+ nl, new_loan_msg, new_loan.kind.to_user_str())[]);
}
(_, _) => {
self.bccx.span_err(
new_loan.span,
- format!("cannot borrow `{}`{} as {} because \
+ &format!("cannot borrow `{}`{} as {} because \
{} is also borrowed as {}{}",
nl,
new_loan_msg,
new_loan.kind.to_user_str(),
ol_pronoun,
old_loan.kind.to_user_str(),
- old_loan_msg).index(&FullRange));
+ old_loan_msg)[]);
}
}
euv::ClosureCapture(span) => {
self.bccx.span_note(
span,
- format!("borrow occurs due to use of `{}` in closure",
- nl).index(&FullRange));
+ &format!("borrow occurs due to use of `{}` in closure",
+ nl)[]);
}
_ => { }
}
self.bccx.span_note(
old_loan.span,
- format!("{}; {}", borrow_summary, rule_summary).index(&FullRange));
+ &format!("{}; {}", borrow_summary, rule_summary)[]);
let old_loan_span = self.tcx().map.span(old_loan.kill_scope.node_id());
self.bccx.span_end_note(old_loan_span,
UseWhileBorrowed(loan_path, loan_span) => {
self.bccx.span_err(
span,
- format!("cannot use `{}` because it was mutably borrowed",
- self.bccx.loan_path_to_string(copy_path).index(&FullRange))
- .index(&FullRange));
+ &format!("cannot use `{}` because it was mutably borrowed",
+ &self.bccx.loan_path_to_string(copy_path)[])
+ []);
self.bccx.span_note(
loan_span,
- format!("borrow of `{}` occurs here",
- self.bccx.loan_path_to_string(&*loan_path).index(&FullRange))
- .index(&FullRange));
+ &format!("borrow of `{}` occurs here",
+ &self.bccx.loan_path_to_string(&*loan_path)[])
+ []);
}
}
}
let err_message = match move_kind {
move_data::Captured =>
format!("cannot move `{}` into closure because it is borrowed",
- self.bccx.loan_path_to_string(move_path).index(&FullRange)),
+ &self.bccx.loan_path_to_string(move_path)[]),
move_data::Declared |
move_data::MoveExpr |
move_data::MovePat =>
format!("cannot move out of `{}` because it is borrowed",
- self.bccx.loan_path_to_string(move_path).index(&FullRange))
+ &self.bccx.loan_path_to_string(move_path)[])
};
- self.bccx.span_err(span, err_message.index(&FullRange));
+ self.bccx.span_err(span, &err_message[]);
self.bccx.span_note(
loan_span,
- format!("borrow of `{}` occurs here",
- self.bccx.loan_path_to_string(&*loan_path).index(&FullRange))
- .index(&FullRange));
+ &format!("borrow of `{}` occurs here",
+ &self.bccx.loan_path_to_string(&*loan_path)[])
+ []);
}
}
}
if kind == ty::FnUnboxedClosureKind {
self.bccx.span_err(
assignment_span,
- format!("cannot assign to {}",
- self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange));
+ &format!("cannot assign to {}",
+ self.bccx.cmt_to_string(&*assignee_cmt))[]);
self.bccx.span_help(
self.tcx().map.span(upvar_id.closure_expr_id),
"consider changing this closure to take self by mutable reference");
} else {
self.bccx.span_err(
assignment_span,
- format!("cannot assign to {} {}",
+ &format!("cannot assign to {} {}",
assignee_cmt.mutbl.to_user_str(),
- self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange));
+ self.bccx.cmt_to_string(&*assignee_cmt))[]);
}
}
_ => match opt_loan_path(&assignee_cmt) {
Some(lp) => {
self.bccx.span_err(
assignment_span,
- format!("cannot assign to {} {} `{}`",
+ &format!("cannot assign to {} {} `{}`",
assignee_cmt.mutbl.to_user_str(),
self.bccx.cmt_to_string(&*assignee_cmt),
- self.bccx.loan_path_to_string(&*lp)).index(&FullRange));
+ self.bccx.loan_path_to_string(&*lp))[]);
}
None => {
self.bccx.span_err(
assignment_span,
- format!("cannot assign to {} {}",
+ &format!("cannot assign to {} {}",
assignee_cmt.mutbl.to_user_str(),
- self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange));
+ self.bccx.cmt_to_string(&*assignee_cmt))[]);
}
}
}
loan: &Loan) {
self.bccx.span_err(
span,
- format!("cannot assign to `{}` because it is borrowed",
- self.bccx.loan_path_to_string(loan_path)).index(&FullRange));
+ &format!("cannot assign to `{}` because it is borrowed",
+ self.bccx.loan_path_to_string(loan_path))[]);
self.bccx.span_note(
loan.span,
- format!("borrow of `{}` occurs here",
- self.bccx.loan_path_to_string(loan_path)).index(&FullRange));
+ &format!("borrow of `{}` occurs here",
+ self.bccx.loan_path_to_string(loan_path))[]);
}
}
// This represents the collection of all but one of the elements
// from an array at the path described by the move path index.
// Note that attached MovePathIndex should have mem_categorization
- // of InteriorElement (i.e. array dereference `.index(&FullRange)`).
+ // of InteriorElement (i.e. array dereference `&foo[]`).
AllButOneFrom(MovePathIndex),
}
let attrs : &[ast::Attribute];
attrs = match tcx.map.find(id) {
Some(ast_map::NodeItem(ref item)) =>
- item.attrs.index(&FullRange),
+ &item.attrs[],
Some(ast_map::NodeImplItem(&ast::MethodImplItem(ref m))) =>
- m.attrs.index(&FullRange),
+ &m.attrs[],
Some(ast_map::NodeTraitItem(&ast::ProvidedMethod(ref m))) =>
- m.attrs.index(&FullRange),
- _ => [].index(&FullRange),
+ &m.attrs[],
+ _ => &[][],
};
let span_err =
for (i, mpi) in vec_rc.iter().enumerate() {
let render = |&:| this.path_loan_path(*mpi).user_string(tcx);
if span_err {
- tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).index(&FullRange));
+ tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render())[]);
}
if print {
println!("id:{} {}[{}] `{}`", id, kind, i, render());
for (i, f) in vec_rc.iter().enumerate() {
let render = |&:| f.loan_path_user_string(this, tcx);
if span_err {
- tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).index(&FullRange));
+ tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render())[]);
}
if print {
println!("id:{} {}[{}] `{}`", id, kind, i, render());
// First, filter out duplicates
moved.sort();
moved.dedup();
- debug!("fragments 1 moved: {:?}", path_lps(moved.index(&FullRange)));
+ debug!("fragments 1 moved: {:?}", path_lps(&moved[]));
assigned.sort();
assigned.dedup();
- debug!("fragments 1 assigned: {:?}", path_lps(assigned.index(&FullRange)));
+ debug!("fragments 1 assigned: {:?}", path_lps(&assigned[]));
// Second, build parents from the moved and assigned.
for m in moved.iter() {
parents.sort();
parents.dedup();
- debug!("fragments 2 parents: {:?}", path_lps(parents.index(&FullRange)));
+ debug!("fragments 2 parents: {:?}", path_lps(&parents[]));
// Third, filter the moved and assigned fragments down to just the non-parents
- moved.retain(|f| non_member(*f, parents.index(&FullRange)));
- debug!("fragments 3 moved: {:?}", path_lps(moved.index(&FullRange)));
+ moved.retain(|f| non_member(*f, &parents[]));
+ debug!("fragments 3 moved: {:?}", path_lps(&moved[]));
- assigned.retain(|f| non_member(*f, parents.index(&FullRange)));
- debug!("fragments 3 assigned: {:?}", path_lps(assigned.index(&FullRange)));
+ assigned.retain(|f| non_member(*f, &parents[]));
+ debug!("fragments 3 assigned: {:?}", path_lps(&assigned[]));
// Fourth, build the leftover from the moved, assigned, and parents.
for m in moved.iter() {
unmoved.sort();
unmoved.dedup();
- debug!("fragments 4 unmoved: {:?}", frag_lps(unmoved.index(&FullRange)));
+ debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[]));
// Fifth, filter the leftover fragments down to its core.
unmoved.retain(|f| match *f {
AllButOneFrom(_) => true,
- Just(mpi) => non_member(mpi, parents.index(&FullRange)) &&
- non_member(mpi, moved.index(&FullRange)) &&
- non_member(mpi, assigned.index(&FullRange))
+ Just(mpi) => non_member(mpi, &parents[]) &&
+ non_member(mpi, &moved[]) &&
+ non_member(mpi, &assigned[])
});
- debug!("fragments 5 unmoved: {:?}", frag_lps(unmoved.index(&FullRange)));
+ debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[]));
// Swap contents back in.
fragments.unmoved_fragments = unmoved;
let msg = format!("type {} ({:?}) is not fragmentable",
parent_ty.repr(tcx), sty_and_variant_info);
let opt_span = origin_id.and_then(|id|tcx.map.opt_span(id));
- tcx.sess.opt_span_bug(opt_span, msg.index(&FullRange))
+ tcx.sess.opt_span_bug(opt_span, &msg[])
}
}
}
ty::ReInfer(..) => {
self.tcx().sess.span_bug(
cmt.span,
- format!("invalid borrow lifetime: {:?}",
- loan_region).index(&FullRange));
+ &format!("invalid borrow lifetime: {:?}",
+ loan_region)[]);
}
};
debug!("loan_scope = {:?}", loan_scope);
mc::cat_static_item => {
bccx.span_err(
move_from.span,
- format!("cannot move out of {}",
- bccx.cmt_to_string(&*move_from)).index(&FullRange));
+ &format!("cannot move out of {}",
+ bccx.cmt_to_string(&*move_from))[]);
}
mc::cat_downcast(ref b, _) |
| ty::ty_enum(did, _) if ty::has_dtor(bccx.tcx, did) => {
bccx.span_err(
move_from.span,
- format!("cannot move out of type `{}`, \
+ &format!("cannot move out of type `{}`, \
which defines the `Drop` trait",
- b.ty.user_string(bccx.tcx)).index(&FullRange));
+ b.ty.user_string(bccx.tcx))[]);
},
_ => panic!("this path should not cause illegal move")
}
"attempting to move value to here");
bccx.span_help(
move_to_span,
- format!("to prevent the move, \
+ &format!("to prevent the move, \
use `ref {0}` or `ref mut {0}` to capture value by \
reference",
- pat_name).index(&FullRange));
+ pat_name)[]);
} else {
bccx.span_note(move_to_span,
- format!("and here (use `ref {0}` or `ref mut {0}`)",
- pat_name).index(&FullRange));
+ &format!("and here (use `ref {0}` or `ref mut {0}`)",
+ pat_name)[]);
}
}
check_loans::check_loans(this,
&loan_dfcx,
flowed_moves,
- all_loans.index(&FullRange),
+ &all_loans[],
id,
decl,
body);
pub fn report(&self, err: BckError<'tcx>) {
self.span_err(
err.span,
- self.bckerr_to_string(&err).index(&FullRange));
+ &self.bckerr_to_string(&err)[]);
self.note_and_explain_bckerr(err);
}
move_data::Declared => {
self.tcx.sess.span_err(
use_span,
- format!("{} of possibly uninitialized variable: `{}`",
+ &format!("{} of possibly uninitialized variable: `{}`",
verb,
- self.loan_path_to_string(lp)).index(&FullRange));
+ self.loan_path_to_string(lp))[]);
(self.loan_path_to_string(moved_lp),
String::new())
}
else { "" };
self.tcx.sess.span_err(
use_span,
- format!("{} of {}moved value: `{}`",
+ &format!("{} of {}moved value: `{}`",
verb,
msg,
- nl).index(&FullRange));
+ nl)[]);
(ol, moved_lp_msg)
}
};
(ty::expr_ty_adjusted(self.tcx, &*expr), expr.span)
}
r => {
- self.tcx.sess.bug(format!("MoveExpr({}) maps to \
+ self.tcx.sess.bug(&format!("MoveExpr({}) maps to \
{:?}, not Expr",
the_move.id,
- r).index(&FullRange))
+ r)[])
}
};
let (suggestion, _) =
move_suggestion(param_env, expr_span, expr_ty, ("moved by default", ""));
self.tcx.sess.span_note(
expr_span,
- format!("`{}` moved here{} because it has type `{}`, which is {}",
+ &format!("`{}` moved here{} because it has type `{}`, which is {}",
ol,
moved_lp_msg,
expr_ty.user_string(self.tcx),
- suggestion).index(&FullRange));
+ suggestion)[]);
}
move_data::MovePat => {
let pat_ty = ty::node_id_to_type(self.tcx, the_move.id);
let span = self.tcx.map.span(the_move.id);
self.tcx.sess.span_note(span,
- format!("`{}` moved here{} because it has type `{}`, \
+ &format!("`{}` moved here{} because it has type `{}`, \
which is moved by default",
ol,
moved_lp_msg,
- pat_ty.user_string(self.tcx)).index(&FullRange));
+ pat_ty.user_string(self.tcx))[]);
self.tcx.sess.span_help(span,
"use `ref` to override");
}
(ty::expr_ty_adjusted(self.tcx, &*expr), expr.span)
}
r => {
- self.tcx.sess.bug(format!("Captured({}) maps to \
+ self.tcx.sess.bug(&format!("Captured({}) maps to \
{:?}, not Expr",
the_move.id,
- r).index(&FullRange))
+ r)[])
}
};
let (suggestion, help) =
"make a copy and capture that instead to override"));
self.tcx.sess.span_note(
expr_span,
- format!("`{}` moved into closure environment here{} because it \
+ &format!("`{}` moved into closure environment here{} because it \
has type `{}`, which is {}",
ol,
moved_lp_msg,
expr_ty.user_string(self.tcx),
- suggestion).index(&FullRange));
+ suggestion)[]);
self.tcx.sess.span_help(expr_span, help);
}
}
&move_data::Assignment) {
self.tcx.sess.span_err(
span,
- format!("re-assignment of immutable variable `{}`",
- self.loan_path_to_string(lp)).index(&FullRange));
+ &format!("re-assignment of immutable variable `{}`",
+ self.loan_path_to_string(lp))[]);
self.tcx.sess.span_note(assign.span, "prior assignment occurs here");
}
mc::AliasableOther => {
self.tcx.sess.span_err(
span,
- format!("{} in an aliasable location",
- prefix).index(&FullRange));
+ &format!("{} in an aliasable location",
+ prefix)[]);
}
mc::AliasableClosure(id) => {
self.tcx.sess.span_err(span,
mc::AliasableStaticMut(..) => {
self.tcx.sess.span_err(
span,
- format!("{} in a static location", prefix).index(&FullRange));
+ &format!("{} in a static location", prefix)[]);
}
mc::AliasableBorrowed => {
self.tcx.sess.span_err(
span,
- format!("{} in a `&` reference", prefix).index(&FullRange));
+ &format!("{} in a `&` reference", prefix)[]);
}
}
};
note_and_explain_region(
self.tcx,
- format!("{} would have to be valid for ",
- descr).index(&FullRange),
+ &format!("{} would have to be valid for ",
+ descr)[],
loan_scope,
"...");
note_and_explain_region(
self.tcx,
- format!("...but {} is only valid for ", descr).index(&FullRange),
+ &format!("...but {} is only valid for ", descr)[],
ptr_scope,
"");
}
out.push('(');
self.append_loan_path_to_string(&**lp_base, out);
out.push_str(DOWNCAST_PRINTED_OPERATOR);
- out.push_str(ty::item_path_str(self.tcx, variant_def_id).index(&FullRange));
+ out.push_str(&ty::item_path_str(self.tcx, variant_def_id)[]);
out.push(')');
}
}
mc::PositionalField(idx) => {
out.push('.');
- out.push_str(idx.to_string().index(&FullRange));
+ out.push_str(&idx.to_string()[]);
}
}
}
out.push('(');
self.append_autoderefd_loan_path_to_string(&**lp_base, out);
out.push(':');
- out.push_str(ty::item_path_str(self.tcx, variant_def_id).index(&FullRange));
+ out.push_str(&ty::item_path_str(self.tcx, variant_def_id)[]);
out.push(')');
}
if seen_one { sets.push_str(" "); } else { seen_one = true; }
sets.push_str(variant.short_name());
sets.push_str(": ");
- sets.push_str(self.dataflow_for_variant(e, n, variant).index(&FullRange));
+ sets.push_str(&self.dataflow_for_variant(e, n, variant)[]);
}
sets
}
set.push_str(", ");
}
let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp);
- set.push_str(loan_str.index(&FullRange));
+ set.push_str(&loan_str[]);
saw_some = true;
true
});
let outputs = build_output_filenames(input,
outdir,
output,
- krate.attrs.index(&FullRange),
+ &krate.attrs[],
&sess);
- let id = link::find_crate_name(Some(&sess), krate.attrs.index(&FullRange),
+ let id = link::find_crate_name(Some(&sess), &krate.attrs[],
input);
let expanded_crate
- = match phase_2_configure_and_expand(&sess, krate, id.index(&FullRange),
+ = match phase_2_configure_and_expand(&sess, krate, &id[],
addl_plugins) {
None => return,
Some(k) => k
let mut forest = ast_map::Forest::new(expanded_crate);
let ast_map = assign_node_ids_and_map(&sess, &mut forest);
- write_out_deps(&sess, input, &outputs, id.index(&FullRange));
+ write_out_deps(&sess, input, &outputs, &id[]);
if stop_after_phase_2(&sess) { return; }
let time_passes = sess.time_passes();
*sess.crate_types.borrow_mut() =
- collect_crate_types(sess, krate.attrs.index(&FullRange));
+ collect_crate_types(sess, &krate.attrs[]);
*sess.crate_metadata.borrow_mut() =
- collect_crate_metadata(sess, krate.attrs.index(&FullRange));
+ collect_crate_metadata(sess, &krate.attrs[]);
time(time_passes, "recursion limit", (), |_| {
middle::recursion_limit::update_recursion_limit(sess, &krate);
if cfg!(windows) {
_old_path = os::getenv("PATH").unwrap_or(_old_path);
let mut new_path = sess.host_filesearch(PathKind::All).get_dylib_search_paths();
- new_path.extend(os::split_paths(_old_path.index(&FullRange)).into_iter());
- os::setenv("PATH", os::join_paths(new_path.index(&FullRange)).unwrap());
+ new_path.extend(os::split_paths(&_old_path[]).into_iter());
+ os::setenv("PATH", os::join_paths(&new_path[]).unwrap());
}
let cfg = syntax::ext::expand::ExpansionConfig {
crate_name: crate_name.to_string(),
time(sess.time_passes(), "LLVM passes", (), |_|
write::run_passes(sess,
trans,
- sess.opts.output_types.index(&FullRange),
+ &sess.opts.output_types[],
outputs));
}
outputs: &OutputFilenames) {
let old_path = os::getenv("PATH").unwrap_or_else(||String::new());
let mut new_path = sess.host_filesearch(PathKind::All).get_tools_search_paths();
- new_path.extend(os::split_paths(old_path.index(&FullRange)).into_iter());
- os::setenv("PATH", os::join_paths(new_path.index(&FullRange)).unwrap());
+ new_path.extend(os::split_paths(&old_path[]).into_iter());
+ os::setenv("PATH", os::join_paths(&new_path[]).unwrap());
time(sess.time_passes(), "linking", (), |_|
link::link_binary(sess,
trans,
outputs,
- trans.link.crate_name.index(&FullRange)));
+ &trans.link.crate_name[]));
os::setenv("PATH", old_path);
}
// write Makefile-compatible dependency rules
let files: Vec<String> = sess.codemap().files.borrow()
.iter().filter(|fmap| fmap.is_real_file())
- .map(|fmap| escape_dep_filename(fmap.name.index(&FullRange)))
+ .map(|fmap| escape_dep_filename(&fmap.name[]))
.collect();
let mut file = try!(io::File::create(&deps_filename));
for path in out_filenames.iter() {
match result {
Ok(()) => {}
Err(e) => {
- sess.fatal(format!("error writing dependencies to `{}`: {}",
- deps_filename.display(), e).index(&FullRange));
+ sess.fatal(&format!("error writing dependencies to `{}`: {}",
+ deps_filename.display(), e)[]);
}
}
}
let res = !link::invalid_output_for_target(session, *crate_type);
if !res {
- session.warn(format!("dropping unsupported crate type `{:?}` \
+ session.warn(&format!("dropping unsupported crate type `{:?}` \
for target `{}`",
- *crate_type, session.opts.target_triple).index(&FullRange));
+ *crate_type, session.opts.target_triple)[]);
}
res
let descriptions = diagnostics::registry::Registry::new(&DIAGNOSTICS);
match matches.opt_str("explain") {
Some(ref code) => {
- match descriptions.find_description(code.index(&FullRange)) {
+ match descriptions.find_description(&code[]) {
Some(ref description) => {
println!("{}", description);
}
None => {
- early_error(format!("no extended information for {}", code).index(&FullRange));
+ early_error(&format!("no extended information for {}", code)[]);
}
}
return;
early_error("no input filename given");
}
1u => {
- let ifile = matches.free[0].index(&FullRange);
+ let ifile = &matches.free[0][];
if ifile == "-" {
let contents = io::stdin().read_to_end().unwrap();
let src = String::from_utf8(contents).unwrap();
for lint in lints.into_iter() {
let name = lint.name_lower().replace("_", "-");
println!(" {} {:7.7} {}",
- padded(name.index(&FullRange)), lint.default_level.as_str(), lint.desc);
+ padded(&name[]), lint.default_level.as_str(), lint.desc);
}
println!("\n");
};
let desc = to.into_iter().map(|x| x.as_str().replace("_", "-"))
.collect::<Vec<String>>().connect(", ");
println!(" {} {}",
- padded(name.index(&FullRange)), desc);
+ padded(&name[]), desc);
}
println!("\n");
};
}
let matches =
- match getopts::getopts(args.index(&FullRange), config::optgroups().index(&FullRange)) {
+ match getopts::getopts(&args[], &config::optgroups()[]) {
Ok(m) => m,
Err(f_stable_attempt) => {
// redo option parsing, including unstable options this time,
"run with `RUST_BACKTRACE=1` for a backtrace".to_string(),
];
for note in xs.iter() {
- emitter.emit(None, note.index(&FullRange), None, diagnostic::Note)
+ emitter.emit(None, &note[], None, diagnostic::Note)
}
match r.read_to_string() {
Ok(s) => println!("{}", s),
Err(e) => {
emitter.emit(None,
- format!("failed to read internal \
- stderr: {}", e).index(&FullRange),
+ &format!("failed to read internal \
+ stderr: {}", e)[],
None,
diagnostic::Error)
}
try!(pp::word(&mut s.s, "as"));
try!(pp::space(&mut s.s));
try!(pp::word(&mut s.s,
- ppaux::ty_to_string(
+ &ppaux::ty_to_string(
tcx,
- ty::expr_ty(tcx, expr)).index(&FullRange)));
+ ty::expr_ty(tcx, expr))[]));
s.pclose()
}
_ => Ok(())
ItemViaNode(node_id) =>
NodesMatchingDirect(Some(node_id).into_iter()),
ItemViaPath(ref parts) =>
- NodesMatchingSuffix(map.nodes_matching_suffix(parts.index(&FullRange))),
+ NodesMatchingSuffix(map.nodes_matching_suffix(&parts[])),
}
}
user_option,
self.reconstructed_input(),
is_wrong_because);
- sess.fatal(message.index(&FullRange))
+ sess.fatal(&message[])
};
let mut saw_node = ast::DUMMY_NODE_ID;
let is_expanded = needs_expansion(&ppm);
let compute_ast_map = needs_ast_map(&ppm, &opt_uii);
let krate = if compute_ast_map {
- match driver::phase_2_configure_and_expand(&sess, krate, id.index(&FullRange), None) {
+ match driver::phase_2_configure_and_expand(&sess, krate, &id[], None) {
None => return,
Some(k) => k
}
};
let src_name = driver::source_name(input);
- let src = sess.codemap().get_filemap(src_name.index(&FullRange))
+ let src = sess.codemap().get_filemap(&src_name[])
.src.as_bytes().to_vec();
let mut rdr = MemReader::new(src);
(PpmFlowGraph, opt_uii) => {
debug!("pretty printing flow graph for {:?}", opt_uii);
let uii = opt_uii.unwrap_or_else(|| {
- sess.fatal(format!("`pretty flowgraph=..` needs NodeId (int) or
- unique path suffix (b::c::d)").index(&FullRange))
+ sess.fatal(&format!("`pretty flowgraph=..` needs NodeId (int) or
+ unique path suffix (b::c::d)")[])
});
let ast_map = ast_map.expect("--pretty flowgraph missing ast_map");
let nodeid = uii.to_one_node_id("--pretty", &sess, &ast_map);
let node = ast_map.find(nodeid).unwrap_or_else(|| {
- sess.fatal(format!("--pretty flowgraph couldn't find id: {}",
- nodeid).index(&FullRange))
+ sess.fatal(&format!("--pretty flowgraph couldn't find id: {}",
+ nodeid)[])
});
let code = blocks::Code::from_node(node);
// point to what was found, if there's an
// accessible span.
match ast_map.opt_span(nodeid) {
- Some(sp) => sess.span_fatal(sp, message.index(&FullRange)),
- None => sess.fatal(message.index(&FullRange))
+ Some(sp) => sess.span_fatal(sp, &message[]),
+ None => sess.fatal(&message[])
}
}
}
pub fn t_param(&self, space: subst::ParamSpace, index: u32) -> Ty<'tcx> {
let name = format!("T{}", index);
- ty::mk_param(self.infcx.tcx, space, index, token::intern(name.index(&FullRange)))
+ ty::mk_param(self.infcx.tcx, space, index, token::intern(&name[]))
}
pub fn re_early_bound(&self,
// had the duplicate.
let ns = ns.unwrap();
self.resolve_error(sp,
- format!("duplicate definition of {} `{}`",
+ &format!("duplicate definition of {} `{}`",
namespace_error_to_string(duplicate_type),
- token::get_name(name)).index(&FullRange));
+ token::get_name(name))[]);
{
let r = child.span_for_namespace(ns);
for sp in r.iter() {
self.session.span_note(*sp,
- format!("first definition of {} `{}` here",
+ &format!("first definition of {} `{}` here",
namespace_error_to_string(duplicate_type),
- token::get_name(name)).index(&FullRange));
+ token::get_name(name))[]);
}
}
}
SingleImport(target, _) => {
debug!("(building import directive) building import \
directive: {}::{}",
- self.names_to_string(module_.imports.borrow().last().unwrap()
- .module_path.index(&FullRange)),
+ self.names_to_string(&module_.imports.borrow().last().unwrap().
+ module_path[]),
token::get_name(target));
let mut import_resolutions = module_.import_resolutions
};
let msg = format!("unresolved import `{}`{}",
self.import_path_to_string(
- import_directive.module_path
- .index(&FullRange),
+ &import_directive.module_path[],
import_directive.subclass),
help);
- self.resolve_error(span, msg.index(&FullRange));
+ self.resolve_error(span, &msg[]);
}
Indeterminate => break, // Bail out. We'll come around next time.
Success(()) => () // Good. Continue.
.iter()
.map(|seg| seg.identifier.name)
.collect();
- self.names_to_string(names.index(&FullRange))
+ self.names_to_string(&names[])
}
fn import_directive_subclass_to_string(&mut self,
let module_path = &import_directive.module_path;
debug!("(resolving import for module) resolving import `{}::...` in `{}`",
- self.names_to_string(module_path.index(&FullRange)),
+ self.names_to_string(&module_path[]),
self.module_to_string(&*module_));
// First, resolve the module path for the directive, if necessary.
Some((self.graph_root.get_module(), LastMod(AllPublic)))
} else {
match self.resolve_module_path(module_.clone(),
- module_path.index(&FullRange),
+ &module_path[],
DontUseLexicalScope,
import_directive.span,
ImportSearch) {
ValueNS => "value",
},
token::get_name(name).get());
- self.session.span_err(import_span, msg.index(&FullRange));
+ self.session.span_err(import_span, &msg[]);
}
Some(_) | None => {}
}
if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) {
let msg = format!("`{}` is not directly importable",
token::get_name(name));
- self.session.span_err(import_span, msg.index(&FullRange));
+ self.session.span_err(import_span, &msg[]);
}
}
crate in this module \
(maybe you meant `use {0}::*`?)",
token::get_name(name).get());
- self.session.span_err(import_span, msg.index(&FullRange));
+ self.session.span_err(import_span, &msg[]);
}
Some(_) | None => {}
}
let msg = format!("import `{}` conflicts with value \
in this module",
token::get_name(name).get());
- self.session.span_err(import_span, msg.index(&FullRange));
+ self.session.span_err(import_span, &msg[]);
if let Some(span) = value.value_span {
self.session.span_note(span,
"conflicting value here");
let msg = format!("import `{}` conflicts with type in \
this module",
token::get_name(name).get());
- self.session.span_err(import_span, msg.index(&FullRange));
+ self.session.span_err(import_span, &msg[]);
if let Some(span) = ty.type_span {
self.session.span_note(span,
"note conflicting type here")
let msg = format!("inherent implementations \
are only allowed on types \
defined in the current module");
- self.session.span_err(span, msg.index(&FullRange));
+ self.session.span_err(span, &msg[]);
self.session.span_note(import_span,
"import from other module here")
}
let msg = format!("import `{}` conflicts with existing \
submodule",
token::get_name(name).get());
- self.session.span_err(import_span, msg.index(&FullRange));
+ self.session.span_err(import_span, &msg[]);
if let Some(span) = ty.type_span {
self.session.span_note(span,
"note conflicting module here")
if module.external_module_children.borrow().contains_key(&name) {
self.session
.span_err(span,
- format!("an external crate named `{}` has already \
+ &format!("an external crate named `{}` has already \
been imported into this module",
- token::get_name(name).get()).index(&FullRange));
+ token::get_name(name).get())[]);
}
}
if module.external_module_children.borrow().contains_key(&name) {
self.session
.span_err(span,
- format!("the name `{}` conflicts with an external \
+ &format!("the name `{}` conflicts with an external \
crate that has been imported into this \
module",
- token::get_name(name).get()).index(&FullRange));
+ token::get_name(name).get())[]);
}
}
let segment_name = token::get_name(name);
let module_name = self.module_to_string(&*search_module);
let mut span = span;
- let msg = if "???" == module_name.index(&FullRange) {
+ let msg = if "???" == &module_name[] {
span.hi = span.lo + Pos::from_uint(segment_name.get().len());
match search_parent_externals(name,
match module_prefix_result {
Failed(None) => {
let mpath = self.names_to_string(module_path);
- let mpath = mpath.index(&FullRange);
+ let mpath = &mpath[];
match mpath.rfind(':') {
Some(idx) => {
let msg = format!("Could not find `{}` in `{}`",
// idx +- 1 to account for the
// colons on either side
- mpath.index(&((idx + 1)..)),
- mpath.index(&(0..(idx - 1))));
+ &mpath[(idx + 1)..],
+ &mpath[0..(idx - 1)]);
return Failed(Some((span, msg)));
},
None => {
PathSearch,
true) {
Failed(Some((span, msg))) =>
- self.resolve_error(span, format!("failed to resolve. {}",
- msg).index(&FullRange)),
+ self.resolve_error(span, &format!("failed to resolve. {}",
+ msg)[]),
Failed(None) => (), // Continue up the search chain.
Indeterminate => {
// We couldn't see through the higher scope because of an
} else {
let err = format!("unresolved import (maybe you meant `{}::*`?)",
sn);
- self.resolve_error((*imports)[index].span, err.index(&FullRange));
+ self.resolve_error((*imports)[index].span, &err[]);
}
}
match def_like {
DlDef(d @ DefUpvar(..)) => {
self.session.span_bug(span,
- format!("unexpected {:?} in bindings", d).index(&FullRange))
+ &format!("unexpected {:?} in bindings", d)[])
}
DlDef(d @ DefLocal(_)) => {
let node_id = d.def_id().node;
for (i, rib) in ribs.iter().enumerate().rev() {
match rib.bindings.get(&name).cloned() {
Some(def_like) => {
- return self.upvarify(ribs.index(&((i + 1)..)), def_like, span);
+ return self.upvarify(&ribs[(i + 1)..], def_like, span);
}
None => {
// Continue.
generics,
implemented_traits,
&**self_type,
- impl_items.index(&FullRange));
+ &impl_items[]);
}
ItemTrait(_, ref generics, ref bounds, ref trait_items) => {
ItemStruct(ref struct_def, ref generics) => {
self.resolve_struct(item.id,
generics,
- struct_def.fields.index(&FullRange));
+ &struct_def.fields[]);
}
ItemMod(ref module_) => {
if seen_bindings.contains(&name) {
self.resolve_error(type_parameter.span,
- format!("the name `{}` is already \
+ &format!("the name `{}` is already \
used for a type \
parameter in this type \
parameter list",
token::get_name(
- name)).index(&FullRange))
+ name))[])
}
seen_bindings.insert(name);
};
let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str);
- self.resolve_error(trait_reference.path.span, msg.index(&FullRange));
+ self.resolve_error(trait_reference.path.span, &msg[]);
}
Some(def) => {
match def {
}
(def, _) => {
self.resolve_error(trait_reference.path.span,
- format!("`{}` is not a trait",
+ &format!("`{}` is not a trait",
self.path_names_to_string(
- &trait_reference.path)).index(&FullRange));
+ &trait_reference.path))[]);
// If it's a typedef, give a note
if let DefTy(..) = def {
self.session.span_note(
trait_reference.path.span,
- format!("`type` aliases cannot be used for traits")
- .index(&FullRange));
+ &format!("`type` aliases cannot be used for traits")
+ []);
}
}
}
if self.trait_item_map.get(&(name, did)).is_none() {
let path_str = self.path_names_to_string(&trait_ref.path);
self.resolve_error(span,
- format!("method `{}` is not a member of trait `{}`",
+ &format!("method `{}` is not a member of trait `{}`",
token::get_name(name),
- path_str).index(&FullRange));
+ path_str)[]);
}
}
}
None => {
self.resolve_error(
p.span,
- format!("variable `{}` from pattern #1 is \
+ &format!("variable `{}` from pattern #1 is \
not bound in pattern #{}",
token::get_name(key),
- i + 1).index(&FullRange));
+ i + 1)[]);
}
Some(binding_i) => {
if binding_0.binding_mode != binding_i.binding_mode {
self.resolve_error(
binding_i.span,
- format!("variable `{}` is bound with different \
+ &format!("variable `{}` is bound with different \
mode in pattern #{} than in pattern #1",
token::get_name(key),
- i + 1).index(&FullRange));
+ i + 1)[]);
}
}
}
if !map_0.contains_key(&key) {
self.resolve_error(
binding.span,
- format!("variable `{}` from pattern {}{} is \
+ &format!("variable `{}` from pattern {}{} is \
not bound in pattern {}1",
token::get_name(key),
- "#", i + 1, "#").index(&FullRange));
+ "#", i + 1, "#")[]);
}
}
}
None => {
let msg = format!("use of undeclared type name `{}`",
self.path_names_to_string(path));
- self.resolve_error(ty.span, msg.index(&FullRange));
+ self.resolve_error(ty.span, &msg[]);
}
}
}
FoundStructOrEnumVariant(..) => {
self.resolve_error(
pattern.span,
- format!("declaration of `{}` shadows an enum \
+ &format!("declaration of `{}` shadows an enum \
variant or unit-like struct in \
scope",
- token::get_name(renamed)).index(&FullRange));
+ token::get_name(renamed))[]);
}
FoundConst(ref def, lp) if mode == RefutableMode => {
debug!("(resolving pattern) resolving `{}` to \
// Forbid duplicate bindings in the same
// parameter list.
self.resolve_error(pattern.span,
- format!("identifier `{}` \
+ &format!("identifier `{}` \
is bound more \
than once in \
this parameter \
list",
token::get_ident(
ident))
- .index(&FullRange))
+ [])
} else if bindings_list.get(&renamed) ==
Some(&pat_id) {
// Then this is a duplicate variable in the
// same disjunction, which is an error.
self.resolve_error(pattern.span,
- format!("identifier `{}` is bound \
+ &format!("identifier `{}` is bound \
more than once in the same \
pattern",
- token::get_ident(ident)).index(&FullRange));
+ token::get_ident(ident))[]);
}
// Else, not bound in the same pattern: do
// nothing.
def: {:?}", result);
let msg = format!("`{}` does not name a structure",
self.path_names_to_string(path));
- self.resolve_error(path.span, msg.index(&FullRange));
+ self.resolve_error(path.span, &msg[]);
}
}
}
Failed(err) => {
match err {
Some((span, msg)) => {
- self.resolve_error(span, format!("failed to resolve: {}",
- msg).index(&FullRange));
+ self.resolve_error(span, &format!("failed to resolve: {}",
+ msg)[]);
}
None => ()
}
let last_private;
let module = self.current_module.clone();
match self.resolve_module_path(module,
- module_path.index(&FullRange),
+ &module_path[],
UseLexicalScope,
path.span,
PathSearch) {
}
};
- self.resolve_error(span, format!("failed to resolve. {}",
- msg).index(&FullRange));
+ self.resolve_error(span, &format!("failed to resolve. {}",
+ msg)[]);
return None;
}
Indeterminate => panic!("indeterminate unexpected"),
let containing_module;
let last_private;
match self.resolve_module_path_from_root(root_module,
- module_path.index(&FullRange),
+ &module_path[],
0,
path.span,
PathSearch,
Some((span, msg)) => (span, msg),
None => {
let msg = format!("Use of undeclared module `::{}`",
- self.names_to_string(module_path.index(&FullRange)));
+ self.names_to_string(&module_path[]));
(path.span, msg)
}
};
- self.resolve_error(span, format!("failed to resolve. {}",
- msg).index(&FullRange));
+ self.resolve_error(span, &format!("failed to resolve. {}",
+ msg)[]);
return None;
}
}
TypeNS => {
let name = ident.name;
- self.search_ribs(self.type_ribs.index(&FullRange), name, span)
+ self.search_ribs(&self.type_ribs[], name, span)
}
};
Failed(err) => {
match err {
Some((span, msg)) =>
- self.resolve_error(span, format!("failed to resolve. {}",
- msg).index(&FullRange)),
+ self.resolve_error(span, &format!("failed to resolve. {}",
+ msg)[]),
None => ()
}
}
} else {
match this.resolve_module_path(root,
- name_path.index(&FullRange),
+ &name_path[],
UseLexicalScope,
span,
PathSearch) {
let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::<Vec<_>>();
// Look for a method in the current self type's impl module.
- match get_module(self, path.span, name_path.index(&FullRange)) {
+ match get_module(self, path.span, &name_path[]) {
Some(module) => match module.children.borrow().get(&name) {
Some(binding) => {
let p_str = self.path_names_to_string(&path);
def: {:?}", result);
let msg = format!("`{}` does not name a structure",
self.path_names_to_string(path));
- self.resolve_error(path.span, msg.index(&FullRange));
+ self.resolve_error(path.span, &msg[]);
}
}
None => {
self.resolve_error(
expr.span,
- format!("use of undeclared label `{}`",
- token::get_ident(label)).index(&FullRange))
+ &format!("use of undeclared label `{}`",
+ token::get_ident(label))[])
}
Some(DlDef(def @ DefLabel(_))) => {
// Since this def is a label, it is never read.
// the same conclusion! - nmatsakis
Occupied(entry) => if def != *entry.get() {
self.session
- .bug(format!("node_id {} resolved first to {:?} and \
+ .bug(&format!("node_id {} resolved first to {:?} and \
then {:?}",
node_id,
*entry.get(),
- def).index(&FullRange));
+ def)[]);
},
Vacant(entry) => { entry.insert(def); },
}
BindByValue(_) => {}
BindByRef(..) => {
self.resolve_error(pat.span,
- format!("cannot use `ref` binding mode \
+ &format!("cannot use `ref` binding mode \
with {}",
- descr).index(&FullRange));
+ descr)[]);
}
}
}
if names.len() == 0 {
return "???".to_string();
}
- self.names_to_string(names.into_iter().rev()
- .collect::<Vec<ast::Name>>().index(&FullRange))
+ self.names_to_string(&names.into_iter().rev()
+ .collect::<Vec<ast::Name>>()[])
}
#[allow(dead_code)] // useful for debugging
attrs: &[ast::Attribute],
input: &Input) -> String {
let validate = |&: s: String, span: Option<Span>| {
- creader::validate_crate_name(sess, s.index(&FullRange), span);
+ creader::validate_crate_name(sess, &s[], span);
s
};
let msg = format!("--crate-name and #[crate_name] are \
required to match, but `{}` != `{}`",
s, name);
- sess.span_err(attr.span, msg.index(&FullRange));
+ sess.span_err(attr.span, &msg[]);
}
}
return validate(s.clone(), None);
// to be independent of one another in the crate.
symbol_hasher.reset();
- symbol_hasher.input_str(link_meta.crate_name.index(&FullRange));
+ symbol_hasher.input_str(&link_meta.crate_name[]);
symbol_hasher.input_str("-");
symbol_hasher.input_str(link_meta.crate_hash.as_str());
for meta in tcx.sess.crate_metadata.borrow().iter() {
- symbol_hasher.input_str(meta.index(&FullRange));
+ symbol_hasher.input_str(&meta[]);
}
symbol_hasher.input_str("-");
- symbol_hasher.input_str(encoder::encoded_ty(tcx, t).index(&FullRange));
+ symbol_hasher.input_str(&encoder::encoded_ty(tcx, t)[]);
// Prefix with 'h' so that it never blends into adjacent digits
let mut hash = String::from_str("h");
- hash.push_str(truncated_hash_result(symbol_hasher).index(&FullRange));
+ hash.push_str(&truncated_hash_result(symbol_hasher)[]);
hash
}
let mut tstr = String::new();
for c in c.escape_unicode() { tstr.push(c) }
result.push('$');
- result.push_str(tstr.index(&(1..)));
+ result.push_str(&tstr[1..]);
}
}
}
if result.len() > 0u &&
result.as_bytes()[0] != '_' as u8 &&
! (result.as_bytes()[0] as char).is_xid_start() {
- return format!("_{}", result.index(&FullRange));
+ return format!("_{}", &result[]);
}
return result;
fn push(n: &mut String, s: &str) {
let sani = sanitize(s);
- n.push_str(format!("{}{}", sani.len(), sani).index(&FullRange));
+ n.push_str(&format!("{}{}", sani.len(), sani)[]);
}
// First, connect each component with <len, name> pairs.
for e in path {
- push(&mut n, token::get_name(e.name()).get().index(&FullRange))
+ push(&mut n, &token::get_name(e.name()).get()[])
}
match hash {
hash.push(EXTRA_CHARS.as_bytes()[extra2] as char);
hash.push(EXTRA_CHARS.as_bytes()[extra3] as char);
- exported_name(path, hash.index(&FullRange))
+ exported_name(path, &hash[])
}
pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
t: Ty<'tcx>,
name: &str) -> String {
let s = ppaux::ty_to_string(ccx.tcx(), t);
- let path = [PathName(token::intern(s.index(&FullRange))),
+ let path = [PathName(token::intern(&s[])),
gensym_name(name)];
let hash = get_symbol_hash(ccx, t);
- mangle(ast_map::Values(path.iter()), Some(hash.index(&FullRange)))
+ mangle(ast_map::Values(path.iter()), Some(&hash[]))
}
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String {
match fs::unlink(path) {
Ok(..) => {}
Err(e) => {
- sess.err(format!("failed to remove {}: {}",
+ sess.err(&format!("failed to remove {}: {}",
path.display(),
- e).index(&FullRange));
+ e)[]);
}
}
}
let mut out_filenames = Vec::new();
for &crate_type in sess.crate_types.borrow().iter() {
if invalid_output_for_target(sess, crate_type) {
- sess.bug(format!("invalid output type `{:?}` for target os `{}`",
- crate_type, sess.opts.target_triple).index(&FullRange));
+ sess.bug(&format!("invalid output type `{:?}` for target os `{}`",
+ crate_type, sess.opts.target_triple)[]);
}
let out_file = link_binary_output(sess, trans, crate_type, outputs,
crate_name);
out_filename.with_filename(format!("lib{}.rlib", libname))
}
config::CrateTypeDylib => {
- let (prefix, suffix) = (sess.target.target.options.dll_prefix.index(&FullRange),
- sess.target.target.options.dll_suffix.index(&FullRange));
+ let (prefix, suffix) = (&sess.target.target.options.dll_prefix[],
+ &sess.target.target.options.dll_suffix[]);
out_filename.with_filename(format!("{}{}{}",
prefix,
libname,
out_filename.with_filename(format!("lib{}.a", libname))
}
config::CrateTypeExecutable => {
- let suffix = sess.target.target.options.exe_suffix.index(&FullRange);
+ let suffix = &sess.target.target.options.exe_suffix[];
out_filename.with_filename(format!("{}{}", libname, suffix))
}
}
let obj_is_writeable = is_writeable(&obj_filename);
let out_is_writeable = is_writeable(&out_filename);
if !out_is_writeable {
- sess.fatal(format!("output file {} is not writeable -- check its \
+ sess.fatal(&format!("output file {} is not writeable -- check its \
permissions.",
- out_filename.display()).index(&FullRange));
+ out_filename.display())[]);
}
else if !obj_is_writeable {
- sess.fatal(format!("object file {} is not writeable -- check its \
+ sess.fatal(&format!("object file {} is not writeable -- check its \
permissions.",
- obj_filename.display()).index(&FullRange));
+ obj_filename.display())[]);
}
match crate_type {
for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() {
match kind {
cstore::NativeStatic => {
- ab.add_native_library(l.index(&FullRange)).unwrap();
+ ab.add_native_library(&l[]).unwrap();
}
cstore::NativeFramework | cstore::NativeUnknown => {}
}
// the same filename for metadata (stomping over one another)
let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir");
let metadata = tmpdir.path().join(METADATA_FILENAME);
- match fs::File::create(&metadata).write(trans.metadata
- .index(&FullRange)) {
+ match fs::File::create(&metadata).write(&trans.metadata
+ []) {
Ok(..) => {}
Err(e) => {
- sess.err(format!("failed to write {}: {}",
+ sess.err(&format!("failed to write {}: {}",
metadata.display(),
- e).index(&FullRange));
+ e)[]);
sess.abort_if_errors();
}
}
// was exactly 16 bytes.
let bc_filename = obj_filename.with_extension(format!("{}.bc", i).as_slice());
let bc_deflated_filename = obj_filename.with_extension(
- format!("{}.bytecode.deflate", i).index(&FullRange));
+ &format!("{}.bytecode.deflate", i)[]);
let bc_data = match fs::File::open(&bc_filename).read_to_end() {
Ok(buffer) => buffer,
- Err(e) => sess.fatal(format!("failed to read bytecode: {}",
- e).index(&FullRange))
+ Err(e) => sess.fatal(&format!("failed to read bytecode: {}",
+ e)[])
};
- let bc_data_deflated = match flate::deflate_bytes(bc_data.index(&FullRange)) {
+ let bc_data_deflated = match flate::deflate_bytes(&bc_data[]) {
Some(compressed) => compressed,
- None => sess.fatal(format!("failed to compress bytecode from {}",
- bc_filename.display()).index(&FullRange))
+ None => sess.fatal(&format!("failed to compress bytecode from {}",
+ bc_filename.display())[])
};
let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) {
Ok(file) => file,
Err(e) => {
- sess.fatal(format!("failed to create compressed bytecode \
- file: {}", e).index(&FullRange))
+ sess.fatal(&format!("failed to create compressed bytecode \
+ file: {}", e)[])
}
};
bc_data_deflated.as_slice()) {
Ok(()) => {}
Err(e) => {
- sess.err(format!("failed to write compressed bytecode: \
- {}", e).index(&FullRange));
+ sess.err(&format!("failed to write compressed bytecode: \
+ {}", e)[]);
sess.abort_if_errors()
}
};
try! { writer.write(RLIB_BYTECODE_OBJECT_MAGIC) };
try! { writer.write_le_u32(1) };
try! { writer.write_le_u64(bc_data_deflated_size) };
- try! { writer.write(bc_data_deflated.index(&FullRange)) };
+ try! { writer.write(&bc_data_deflated[]) };
let number_of_bytes_written_so_far =
RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id
let ref name = sess.cstore.get_crate_data(cnum).name;
let p = match *path {
Some(ref p) => p.clone(), None => {
- sess.err(format!("could not find rlib for: `{}`",
- name).index(&FullRange));
+ sess.err(&format!("could not find rlib for: `{}`",
+ name)[]);
continue
}
};
- ab.add_rlib(&p, name.index(&FullRange), sess.lto()).unwrap();
+ ab.add_rlib(&p, &name[], sess.lto()).unwrap();
let native_libs = csearch::get_native_libraries(&sess.cstore, cnum);
all_native_libs.extend(native_libs.into_iter());
cstore::NativeUnknown => "library",
cstore::NativeFramework => "framework",
};
- sess.note(format!("{}: {}", name, *lib).index(&FullRange));
+ sess.note(&format!("{}: {}", name, *lib)[]);
}
}
// The invocations of cc share some flags across platforms
let pname = get_cc_prog(sess);
- let mut cmd = Command::new(pname.index(&FullRange));
+ let mut cmd = Command::new(&pname[]);
- cmd.args(sess.target.target.options.pre_link_args.index(&FullRange));
+ cmd.args(&sess.target.target.options.pre_link_args[]);
link_args(&mut cmd, sess, dylib, tmpdir.path(),
trans, obj_filename, out_filename);
- cmd.args(sess.target.target.options.post_link_args.index(&FullRange));
+ cmd.args(&sess.target.target.options.post_link_args[]);
if !sess.target.target.options.no_compiler_rt {
cmd.arg("-lcompiler-rt");
}
match prog {
Ok(prog) => {
if !prog.status.success() {
- sess.err(format!("linking with `{}` failed: {}",
+ sess.err(&format!("linking with `{}` failed: {}",
pname,
- prog.status).index(&FullRange));
- sess.note(format!("{}", &cmd).index(&FullRange));
+ prog.status)[]);
+ sess.note(&format!("{}", &cmd)[]);
let mut output = prog.error.clone();
- output.push_all(prog.output.index(&FullRange));
- sess.note(str::from_utf8(output.index(&FullRange)).unwrap());
+ output.push_all(&prog.output[]);
+ sess.note(str::from_utf8(&output[]).unwrap());
sess.abort_if_errors();
}
debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap());
debug!("linker stdout:\n{}", String::from_utf8(prog.output).unwrap());
},
Err(e) => {
- sess.err(format!("could not exec the linker `{}`: {}",
+ sess.err(&format!("could not exec the linker `{}`: {}",
pname,
- e).index(&FullRange));
+ e)[]);
sess.abort_if_errors();
}
}
match Command::new("dsymutil").arg(out_filename).output() {
Ok(..) => {}
Err(e) => {
- sess.err(format!("failed to run dsymutil: {}", e).index(&FullRange));
+ sess.err(&format!("failed to run dsymutil: {}", e)[]);
sess.abort_if_errors();
}
}
let mut v = b"-Wl,-force_load,".to_vec();
v.push_all(morestack.as_vec());
- cmd.arg(v.index(&FullRange));
+ cmd.arg(&v[]);
} else {
cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]);
}
if sess.opts.cg.rpath {
let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec();
v.push_all(out_filename.filename().unwrap());
- cmd.arg(v.index(&FullRange));
+ cmd.arg(&v[]);
}
} else {
cmd.arg("-shared");
// addl_lib_search_paths
if sess.opts.cg.rpath {
let sysroot = sess.sysroot();
- let target_triple = sess.opts.target_triple.index(&FullRange);
+ let target_triple = &sess.opts.target_triple[];
let get_install_prefix_lib_path = |:| {
let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX");
let tlib = filesearch::relative_target_lib_path(sysroot, target_triple);
get_install_prefix_lib_path: get_install_prefix_lib_path,
realpath: ::util::fs::realpath
};
- cmd.args(rpath::get_rpath_flags(rpath_config).index(&FullRange));
+ cmd.args(&rpath::get_rpath_flags(rpath_config)[]);
}
// Finally add all the linker arguments provided on the command line along
// with any #[link_args] attributes found inside the crate
let empty = Vec::new();
- cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty).index(&FullRange));
- cmd.args(used_link_args.index(&FullRange));
+ cmd.args(&sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]);
+ cmd.args(&used_link_args[]);
}
// # Native library linking
} else {
// -force_load is the OSX equivalent of --whole-archive, but it
// involves passing the full path to the library to link.
- let lib = archive::find_library(l.index(&FullRange),
+ let lib = archive::find_library(&l[],
sess.target.target.options.staticlib_prefix.as_slice(),
sess.target.target.options.staticlib_suffix.as_slice(),
- search_path.index(&FullRange),
+ &search_path[],
&sess.diagnostic().handler);
let mut v = b"-Wl,-force_load,".to_vec();
v.push_all(lib.as_vec());
- cmd.arg(v.index(&FullRange));
+ cmd.arg(&v[]);
}
}
if takes_hints {
cmd.arg(format!("-l{}", l));
}
cstore::NativeFramework => {
- cmd.arg("-framework").arg(l.index(&FullRange));
+ cmd.arg("-framework").arg(&l[]);
}
cstore::NativeStatic => unreachable!(),
}
// Converts a library file-stem into a cc -l argument
fn unlib<'a>(config: &config::Config, stem: &'a [u8]) -> &'a [u8] {
if stem.starts_with("lib".as_bytes()) && !config.target.options.is_like_windows {
- stem.index(&(3..))
+ &stem[3..]
} else {
stem
}
// against the archive.
if sess.lto() {
let name = cratepath.filename_str().unwrap();
- let name = name.index(&(3..(name.len() - 5))); // chop off lib/.rlib
+ let name = &name[3..(name.len() - 5)]; // chop off lib/.rlib
time(sess.time_passes(),
- format!("altering {}.rlib", name).index(&FullRange),
+ &format!("altering {}.rlib", name)[],
(), |()| {
let dst = tmpdir.join(cratepath.filename().unwrap());
match fs::copy(&cratepath, &dst) {
Ok(..) => {}
Err(e) => {
- sess.err(format!("failed to copy {} to {}: {}",
+ sess.err(&format!("failed to copy {} to {}: {}",
cratepath.display(),
dst.display(),
- e).index(&FullRange));
+ e)[]);
sess.abort_if_errors();
}
}
match fs::chmod(&dst, io::USER_READ | io::USER_WRITE) {
Ok(..) => {}
Err(e) => {
- sess.err(format!("failed to chmod {} when preparing \
+ sess.err(&format!("failed to chmod {} when preparing \
for LTO: {}", dst.display(),
- e).index(&FullRange));
+ e)[]);
sess.abort_if_errors();
}
}
maybe_ar_prog: sess.opts.cg.ar.clone()
};
let mut archive = Archive::open(config);
- archive.remove_file(format!("{}.o", name).index(&FullRange));
+ archive.remove_file(&format!("{}.o", name)[]);
let files = archive.files();
- if files.iter().any(|s| s.index(&FullRange).ends_with(".o")) {
+ if files.iter().any(|s| s[].ends_with(".o")) {
cmd.arg(dst);
}
});
let mut v = "-l".as_bytes().to_vec();
v.push_all(unlib(&sess.target, cratepath.filestem().unwrap()));
- cmd.arg(v.index(&FullRange));
+ cmd.arg(&v[]);
}
}
}
cstore::NativeFramework => {
cmd.arg("-framework");
- cmd.arg(lib.index(&FullRange));
+ cmd.arg(&lib[]);
}
cstore::NativeStatic => {
sess.bug("statics shouldn't be propagated");
let path = match path {
Some(p) => p,
None => {
- sess.fatal(format!("could not find rlib for: `{}`",
- name).index(&FullRange));
+ sess.fatal(&format!("could not find rlib for: `{}`",
+ name)[]);
}
};
let archive = ArchiveRO::open(&path).expect("wanted an rlib");
let file = path.filename_str().unwrap();
- let file = file.index(&(3..(file.len() - 5))); // chop off lib/.rlib
+ let file = &file[3..(file.len() - 5)]; // chop off lib/.rlib
debug!("reading {}", file);
for i in iter::count(0u, 1) {
let bc_encoded = time(sess.time_passes(),
format!("check for {}.{}.bytecode.deflate", name, i).as_slice(),
(),
|_| {
- archive.read(format!("{}.{}.bytecode.deflate",
- file, i).index(&FullRange))
+ archive.read(&format!("{}.{}.bytecode.deflate",
+ file, i)[])
});
let bc_encoded = match bc_encoded {
Some(data) => data,
None => {
if i == 0 {
// No bitcode was found at all.
- sess.fatal(format!("missing compressed bytecode in {}",
- path.display()).index(&FullRange));
+ sess.fatal(&format!("missing compressed bytecode in {}",
+ path.display())[]);
}
// No more bitcode files to read.
break;
if version == 1 {
// The only version existing so far
let data_size = extract_compressed_bytecode_size_v1(bc_encoded);
- let compressed_data = bc_encoded.index(&(
+ let compressed_data = &bc_encoded[
link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET..
- (link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET + data_size as uint)));
+ (link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET + data_size as uint)];
match flate::inflate_bytes(compressed_data) {
Some(inflated) => inflated,
None => {
- sess.fatal(format!("failed to decompress bc of `{}`",
- name).index(&FullRange))
+ sess.fatal(&format!("failed to decompress bc of `{}`",
+ name)[])
}
}
} else {
- sess.fatal(format!("Unsupported bytecode format version {}",
- version).index(&FullRange))
+ sess.fatal(&format!("Unsupported bytecode format version {}",
+ version)[])
}
})
} else {
match flate::inflate_bytes(bc_encoded) {
Some(bc) => bc,
None => {
- sess.fatal(format!("failed to decompress bc of `{}`",
- name).index(&FullRange))
+ sess.fatal(&format!("failed to decompress bc of `{}`",
+ name)[])
}
}
})
let ptr = bc_decoded.as_slice().as_ptr();
debug!("linking {}, part {}", name, i);
time(sess.time_passes(),
- format!("ll link {}.{}", name, i).index(&FullRange),
+ &format!("ll link {}.{}", name, i)[],
(),
|()| unsafe {
if !llvm::LLVMRustLinkInExternalBitcode(llmod,
bc_decoded.len() as libc::size_t) {
write::llvm_err(sess.diagnostic().handler(),
format!("failed to load bc of `{}`",
- name.index(&FullRange)));
+ &name[]));
}
});
}
fn is_versioned_bytecode_format(bc: &[u8]) -> bool {
let magic_id_byte_count = link::RLIB_BYTECODE_OBJECT_MAGIC.len();
return bc.len() > magic_id_byte_count &&
- bc.index(&(0..magic_id_byte_count)) == link::RLIB_BYTECODE_OBJECT_MAGIC;
+ &bc[0..magic_id_byte_count] == link::RLIB_BYTECODE_OBJECT_MAGIC;
}
fn extract_bytecode_format_version(bc: &[u8]) -> u32 {
}
fn read_from_le_bytes<T: Int>(bytes: &[u8], position_in_bytes: uint) -> T {
- let byte_data = bytes.index(&(position_in_bytes..
- (position_in_bytes + mem::size_of::<T>())));
+ let byte_data = &bytes[position_in_bytes..(position_in_bytes + mem::size_of::<T>())];
let data = unsafe {
*(byte_data.as_ptr() as *const T)
};
unsafe {
let cstr = llvm::LLVMRustGetLastError();
if cstr == ptr::null() {
- handler.fatal(msg.index(&FullRange));
+ handler.fatal(&msg[]);
} else {
let err = ffi::c_str_to_bytes(&cstr);
let err = String::from_utf8_lossy(err.as_slice()).to_string();
libc::free(cstr as *mut _);
- handler.fatal(format!("{}: {}",
- msg.index(&FullRange),
- err.index(&FullRange)).index(&FullRange));
+ handler.fatal(&format!("{}: {}",
+ &msg[],
+ &err[])[]);
}
}
}
match diag.code {
Some(ref code) => {
handler.emit_with_code(None,
- diag.msg.index(&FullRange),
- code.index(&FullRange),
+ &diag.msg[],
+ &code[],
diag.lvl);
},
None => {
handler.emit(None,
- diag.msg.index(&FullRange),
+ &diag.msg[],
diag.lvl);
},
}
fn create_target_machine(sess: &Session) -> TargetMachineRef {
let reloc_model_arg = match sess.opts.cg.relocation_model {
- Some(ref s) => s.index(&FullRange),
- None => sess.target.target.options.relocation_model.index(&FullRange)
+ Some(ref s) => &s[],
+ None => &sess.target.target.options.relocation_model[]
};
let reloc_model = match reloc_model_arg {
"pic" => llvm::RelocPIC,
"default" => llvm::RelocDefault,
"dynamic-no-pic" => llvm::RelocDynamicNoPic,
_ => {
- sess.err(format!("{:?} is not a valid relocation mode",
+ sess.err(&format!("{:?} is not a valid relocation mode",
sess.opts
.cg
- .relocation_model).index(&FullRange));
+ .relocation_model)[]);
sess.abort_if_errors();
unreachable!();
}
let fdata_sections = ffunction_sections;
let code_model_arg = match sess.opts.cg.code_model {
- Some(ref s) => s.index(&FullRange),
- None => sess.target.target.options.code_model.index(&FullRange)
+ Some(ref s) => &s[],
+ None => &sess.target.target.options.code_model[]
};
let code_model = match code_model_arg {
"medium" => llvm::CodeModelMedium,
"large" => llvm::CodeModelLarge,
_ => {
- sess.err(format!("{:?} is not a valid code model",
+ sess.err(&format!("{:?} is not a valid code model",
sess.opts
.cg
- .code_model).index(&FullRange));
+ .code_model)[]);
sess.abort_if_errors();
unreachable!();
}
};
- let triple = sess.target.target.llvm_target.index(&FullRange);
+ let triple = &sess.target.target.llvm_target[];
let tm = unsafe {
let triple = CString::from_slice(triple.as_bytes());
match cgcx.lto_ctxt {
Some((sess, _)) => {
sess.codemap().with_expn_info(ExpnId::from_llvm_cookie(cookie), |info| match info {
- Some(ei) => sess.span_err(ei.call_site, msg.index(&FullRange)),
- None => sess.err(msg.index(&FullRange)),
+ Some(ei) => sess.span_err(ei.call_site, &msg[]),
+ None => sess.err(&msg[]),
});
}
None => {
- cgcx.handler.err(msg.index(&FullRange));
+ cgcx.handler.err(&msg[]);
cgcx.handler.note("build without -C codegen-units for more exact errors");
}
}
}
if config.emit_asm {
- let path = output_names.with_extension(format!("{}.s", name_extra).index(&FullRange));
+ let path = output_names.with_extension(&format!("{}.s", name_extra)[]);
with_codegen(tm, llmod, config.no_builtins, |cpm| {
write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::AssemblyFileType);
});
}
if config.emit_obj {
- let path = output_names.with_extension(format!("{}.o", name_extra).index(&FullRange));
+ let path = output_names.with_extension(&format!("{}.o", name_extra)[]);
with_codegen(tm, llmod, config.no_builtins, |cpm| {
write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::ObjectFileType);
});
// Process the work items, optionally using worker threads.
if sess.opts.cg.codegen_units == 1 {
- run_work_singlethreaded(sess, trans.reachable.index(&FullRange), work_items);
+ run_work_singlethreaded(sess, &trans.reachable[], work_items);
} else {
run_work_multithreaded(sess, work_items, sess.opts.cg.codegen_units);
}
if crate_output.single_output_file.is_some() {
// 2) Multiple codegen units, with `-o some_name`. We have
// no good solution for this case, so warn the user.
- sess.warn(format!("ignoring -o because multiple .{} files were produced",
- ext).index(&FullRange));
+ sess.warn(&format!("ignoring -o because multiple .{} files were produced",
+ ext)[]);
} else {
// 3) Multiple codegen units, but no `-o some_name`. We
// just leave the `foo.0.x` files in place.
};
let pname = get_cc_prog(sess);
- let mut cmd = Command::new(pname.index(&FullRange));
+ let mut cmd = Command::new(&pname[]);
- cmd.args(sess.target.target.options.pre_link_args.index(&FullRange));
+ cmd.args(&sess.target.target.options.pre_link_args[]);
cmd.arg("-nostdlib");
for index in range(0, trans.modules.len()) {
- cmd.arg(crate_output.with_extension(format!("{}.o", index).index(&FullRange)));
+ cmd.arg(crate_output.with_extension(&format!("{}.o", index)[]));
}
cmd.arg("-r")
.arg("-o")
.arg(windows_output_path.as_ref().unwrap_or(output_path));
- cmd.args(sess.target.target.options.post_link_args.index(&FullRange));
+ cmd.args(&sess.target.target.options.post_link_args[]);
if (sess.opts.debugging_opts & config::PRINT_LINK_ARGS) != 0 {
println!("{}", &cmd);
match cmd.status() {
Ok(status) => {
if !status.success() {
- sess.err(format!("linking of {} with `{}` failed",
- output_path.display(), cmd).index(&FullRange));
+ sess.err(&format!("linking of {} with `{}` failed",
+ output_path.display(), cmd)[]);
sess.abort_if_errors();
}
},
Err(e) => {
- sess.err(format!("could not exec the linker `{}`: {}",
+ sess.err(&format!("could not exec the linker `{}`: {}",
pname,
- e).index(&FullRange));
+ e)[]);
sess.abort_if_errors();
},
}
for i in range(0, trans.modules.len()) {
if modules_config.emit_obj {
let ext = format!("{}.o", i);
- remove(sess, &crate_output.with_extension(ext.index(&FullRange)));
+ remove(sess, &crate_output.with_extension(&ext[]));
}
if modules_config.emit_bc && !keep_numbered_bitcode {
let ext = format!("{}.bc", i);
- remove(sess, &crate_output.with_extension(ext.index(&FullRange)));
+ remove(sess, &crate_output.with_extension(&ext[]));
}
}
pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
let pname = get_cc_prog(sess);
- let mut cmd = Command::new(pname.index(&FullRange));
+ let mut cmd = Command::new(&pname[]);
cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject))
.arg(outputs.temp_path(config::OutputTypeAssembly));
match cmd.output() {
Ok(prog) => {
if !prog.status.success() {
- sess.err(format!("linking with `{}` failed: {}",
+ sess.err(&format!("linking with `{}` failed: {}",
pname,
- prog.status).index(&FullRange));
- sess.note(format!("{}", &cmd).index(&FullRange));
+ prog.status)[]);
+ sess.note(&format!("{}", &cmd)[]);
let mut note = prog.error.clone();
- note.push_all(prog.output.index(&FullRange));
- sess.note(str::from_utf8(note.index(&FullRange)).unwrap());
+ note.push_all(&prog.output[]);
+ sess.note(str::from_utf8(&note[]).unwrap());
sess.abort_if_errors();
}
},
Err(e) => {
- sess.err(format!("could not exec the linker `{}`: {}",
+ sess.err(&format!("could not exec the linker `{}`: {}",
pname,
- e).index(&FullRange));
+ e)[]);
sess.abort_if_errors();
}
}
if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
for arg in sess.opts.cg.llvm_args.iter() {
- add((*arg).index(&FullRange));
+ add(&(*arg)[]);
}
}
// dump info about all the external crates referenced from this crate
self.sess.cstore.iter_crate_data(|n, cmd| {
- self.fmt.external_crate_str(krate.span, cmd.name.index(&FullRange), n);
+ self.fmt.external_crate_str(krate.span, &cmd.name[], n);
});
self.fmt.recorder.record("end_external_crates\n");
}
for &(ref span, ref qualname) in sub_paths.iter() {
self.fmt.sub_mod_ref_str(path.span,
*span,
- qualname.index(&FullRange),
+ &qualname[],
self.cur_scope);
}
}
for &(ref span, ref qualname) in sub_paths.iter() {
self.fmt.sub_mod_ref_str(path.span,
*span,
- qualname.index(&FullRange),
+ &qualname[],
self.cur_scope);
}
}
let (ref span, ref qualname) = sub_paths[len-2];
self.fmt.sub_type_ref_str(path.span,
*span,
- qualname.index(&FullRange));
+ &qualname[]);
// write the other sub-paths
if len <= 2 {
return;
}
- let sub_paths = sub_paths.index(&(0..(len-2)));
+ let sub_paths = &sub_paths[0..(len-2)];
for &(ref span, ref qualname) in sub_paths.iter() {
self.fmt.sub_mod_ref_str(path.span,
*span,
- qualname.index(&FullRange),
+ &qualname[],
self.cur_scope);
}
}
// looks up anything, not just a type
fn lookup_type_ref(&self, ref_id: NodeId) -> Option<DefId> {
if !self.analysis.ty_cx.def_map.borrow().contains_key(&ref_id) {
- self.sess.bug(format!("def_map has no key for {} in lookup_type_ref",
- ref_id).index(&FullRange));
+ self.sess.bug(&format!("def_map has no key for {} in lookup_type_ref",
+ ref_id)[]);
}
let def = (*self.analysis.ty_cx.def_map.borrow())[ref_id];
match def {
fn lookup_def_kind(&self, ref_id: NodeId, span: Span) -> Option<recorder::Row> {
let def_map = self.analysis.ty_cx.def_map.borrow();
if !def_map.contains_key(&ref_id) {
- self.sess.span_bug(span, format!("def_map has no key for {} in lookup_def_kind",
- ref_id).index(&FullRange));
+ self.sess.span_bug(span, &format!("def_map has no key for {} in lookup_def_kind",
+ ref_id)[]);
}
let def = (*def_map)[ref_id];
match def {
def::DefUse(_) |
def::DefMethod(..) |
def::DefPrimTy(_) => {
- self.sess.span_bug(span, format!("lookup_def_kind for unexpected item: {:?}",
- def).index(&FullRange));
+ self.sess.span_bug(span, &format!("lookup_def_kind for unexpected item: {:?}",
+ def)[]);
},
}
}
span_utils.span_for_last_ident(p.span),
id,
qualname,
- path_to_string(p).index(&FullRange),
- typ.index(&FullRange));
+ &path_to_string(p)[],
+ &typ[]);
}
self.collected_paths.clear();
}
match item.node {
ast::ItemImpl(_, _, _, _, ref ty, _) => {
let mut result = String::from_str("<");
- result.push_str(ty_to_string(&**ty).index(&FullRange));
+ result.push_str(&ty_to_string(&**ty)[]);
match ty::trait_of_item(&self.analysis.ty_cx,
ast_util::local_def(method.id)) {
}
_ => {
self.sess.span_bug(method.span,
- format!("Container {} for method {} not an impl?",
- impl_id.node, method.id).index(&FullRange));
+ &format!("Container {} for method {} not an impl?",
+ impl_id.node, method.id)[]);
},
}
},
_ => {
self.sess.span_bug(method.span,
- format!("Container {} for method {} is not a node item {:?}",
- impl_id.node,
- method.id,
- self.analysis.ty_cx.map.get(impl_id.node)
- ).index(&FullRange));
+ &format!(
+ "Container {} for method {} is not a node item {:?}",
+ impl_id.node,
+ method.id,
+ self.analysis.ty_cx.map.get(impl_id.node))[]);
},
},
None => match ty::trait_of_item(&self.analysis.ty_cx,
}
_ => {
self.sess.span_bug(method.span,
- format!("Could not find container {} for method {}",
- def_id.node, method.id).index(&FullRange));
+ &format!("Could not find container {} for method {}",
+ def_id.node, method.id)[]);
}
}
},
None => {
self.sess.span_bug(method.span,
- format!("Could not find container for method {}",
- method.id).index(&FullRange));
+ &format!("Could not find container for method {}",
+ method.id)[]);
},
},
};
qualname.push_str(get_ident(method.pe_ident()).get());
- let qualname = qualname.index(&FullRange);
+ let qualname = &qualname[];
// record the decl for this def (if it has one)
let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx,
Some(sub_span) => self.fmt.field_str(field.span,
Some(sub_span),
field.node.id,
- name.get().index(&FullRange),
- qualname.index(&FullRange),
- typ.index(&FullRange),
+ &name.get()[],
+ &qualname[],
+ &typ[],
scope_id),
None => self.sess.span_bug(field.span,
- format!("Could not find sub-span for field {}",
- qualname).index(&FullRange)),
+ &format!("Could not find sub-span for field {}",
+ qualname)[]),
}
},
_ => (),
self.fmt.typedef_str(full_span,
Some(*param_ss),
param.id,
- name.index(&FullRange),
+ &name[],
"");
}
self.visit_generics(generics);
self.fmt.fn_str(item.span,
sub_span,
item.id,
- qualname.index(&FullRange),
+ &qualname[],
self.cur_scope);
- self.process_formals(&decl.inputs, qualname.index(&FullRange));
+ self.process_formals(&decl.inputs, &qualname[]);
// walk arg and return types
for arg in decl.inputs.iter() {
// walk the body
self.nest(item.id, |v| v.visit_block(&*body));
- self.process_generic_params(ty_params, item.span, qualname.index(&FullRange), item.id);
+ self.process_generic_params(ty_params, item.span, &qualname[], item.id);
}
fn process_static(&mut self,
sub_span,
item.id,
get_ident(item.ident).get(),
- qualname.index(&FullRange),
- value.index(&FullRange),
- ty_to_string(&*typ).index(&FullRange),
+ &qualname[],
+ &value[],
+ &ty_to_string(&*typ)[],
self.cur_scope);
// walk type and init value
sub_span,
item.id,
get_ident(item.ident).get(),
- qualname.index(&FullRange),
+ &qualname[],
"",
- ty_to_string(&*typ).index(&FullRange),
+ &ty_to_string(&*typ)[],
self.cur_scope);
// walk type and init value
sub_span,
item.id,
ctor_id,
- qualname.index(&FullRange),
+ &qualname[],
self.cur_scope,
- val.index(&FullRange));
+ &val[]);
// fields
for field in def.fields.iter() {
- self.process_struct_field_def(field, qualname.index(&FullRange), item.id);
+ self.process_struct_field_def(field, &qualname[], item.id);
self.visit_ty(&*field.node.ty);
}
- self.process_generic_params(ty_params, item.span, qualname.index(&FullRange), item.id);
+ self.process_generic_params(ty_params, item.span, &qualname[], item.id);
}
fn process_enum(&mut self,
Some(sub_span) => self.fmt.enum_str(item.span,
Some(sub_span),
item.id,
- enum_name.index(&FullRange),
+ &enum_name[],
self.cur_scope,
- val.index(&FullRange)),
+ &val[]),
None => self.sess.span_bug(item.span,
- format!("Could not find subspan for enum {}",
- enum_name).index(&FullRange)),
+ &format!("Could not find subspan for enum {}",
+ enum_name)[]),
}
for variant in enum_definition.variants.iter() {
let name = get_ident(variant.node.name);
self.span.span_for_first_ident(variant.span),
variant.node.id,
name,
- qualname.index(&FullRange),
- enum_name.index(&FullRange),
- val.index(&FullRange),
+ &qualname[],
+ &enum_name[],
+ &val[],
item.id);
for arg in args.iter() {
self.visit_ty(&*arg.ty);
self.span.span_for_first_ident(variant.span),
variant.node.id,
ctor_id,
- qualname.index(&FullRange),
- enum_name.index(&FullRange),
- val.index(&FullRange),
+ &qualname[],
+ &enum_name[],
+ &val[],
item.id);
for field in struct_def.fields.iter() {
}
}
- self.process_generic_params(ty_params, item.span, enum_name.index(&FullRange), item.id);
+ self.process_generic_params(ty_params, item.span, &enum_name[], item.id);
}
fn process_impl(&mut self,
self.fmt.trait_str(item.span,
sub_span,
item.id,
- qualname.index(&FullRange),
+ &qualname[],
self.cur_scope,
- val.index(&FullRange));
+ &val[]);
// super-traits
for super_bound in trait_refs.iter() {
}
// walk generics and methods
- self.process_generic_params(generics, item.span, qualname.index(&FullRange), item.id);
+ self.process_generic_params(generics, item.span, &qualname[], item.id);
for method in methods.iter() {
self.visit_trait_item(method)
}
self.fmt.mod_str(item.span,
sub_span,
item.id,
- qualname.index(&FullRange),
+ &qualname[],
self.cur_scope,
- filename.index(&FullRange));
+ &filename[]);
self.nest(item.id, |v| visit::walk_mod(v, m));
}
def_id,
self.cur_scope),
_ => self.sess.span_bug(span,
- format!("Unexpected def kind while looking up path in '{}'",
- self.span.snippet(span)).index(&FullRange)),
+ &format!("Unexpected def kind while looking up path in '{}'",
+ self.span.snippet(span))[]),
}
// modules or types in the path prefix
match *def {
self.cur_scope);
// walk receiver and args
- visit::walk_exprs(self, args.index(&FullRange));
+ visit::walk_exprs(self, &args[]);
}
fn process_pat(&mut self, p:&ast::Pat) {
Some(sd) => sd,
None => {
self.sess.span_bug(p.span,
- format!("Could not find struct_def for `{}`",
- self.span.snippet(p.span)).index(&FullRange));
+ &format!("Could not find struct_def for `{}`",
+ self.span.snippet(p.span))[]);
}
};
for &Spanned { node: ref field, span } in fields.iter() {
self.fmt.typedef_str(item.span,
sub_span,
item.id,
- qualname.index(&FullRange),
- value.index(&FullRange));
+ &qualname[],
+ &value[]);
self.visit_ty(&**ty);
self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id);
},
None => {
self.sess.span_bug(method_type.span,
- format!("Could not find trait for method {}",
- method_type.id).index(&FullRange));
+ &format!("Could not find trait for method {}",
+ method_type.id)[]);
},
};
qualname.push_str(get_ident(method_type.ident).get());
- let qualname = qualname.index(&FullRange);
+ let qualname = &qualname[];
let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn);
self.fmt.method_decl_str(method_type.span,
id,
cnum,
name,
- s.index(&FullRange),
+ &s[],
self.cur_scope);
},
}
}
let mut id = String::from_str("$");
- id.push_str(ex.id.to_string().index(&FullRange));
- self.process_formals(&decl.inputs, id.index(&FullRange));
+ id.push_str(&ex.id.to_string()[]);
+ self.process_formals(&decl.inputs, &id[]);
// walk arg and return types
for arg in decl.inputs.iter() {
let def_map = self.analysis.ty_cx.def_map.borrow();
if !def_map.contains_key(&id) {
self.sess.span_bug(p.span,
- format!("def_map has no key for {} in visit_arm",
- id).index(&FullRange));
+ &format!("def_map has no key for {} in visit_arm",
+ id)[]);
}
let def = &(*def_map)[id];
match *def {
self.fmt.variable_str(p.span,
Some(p.span),
id,
- path_to_string(p).index(&FullRange),
- value.index(&FullRange),
+ &path_to_string(p)[],
+ &value[],
"")
}
def::DefVariant(..) => {
self.fmt.variable_str(p.span,
sub_span,
id,
- path_to_string(p).index(&FullRange),
- value.index(&FullRange),
- typ.index(&FullRange));
+ &path_to_string(p)[],
+ &value[],
+ &typ[]);
}
self.collected_paths.clear();
}
assert!(analysis.glob_map.is_some());
- let cratename = match attr::find_crate_name(krate.attrs.index(&FullRange)) {
+ let cratename = match attr::find_crate_name(&krate.attrs[]) {
Some(name) => name.get().to_string(),
None => {
info!("Could not find crate name, using 'unknown_crate'");
};
match fs::mkdir_recursive(&root_path, io::USER_RWX) {
- Err(e) => sess.err(format!("Could not create directory {}: {}",
- root_path.display(), e).index(&FullRange)),
+ Err(e) => sess.err(&format!("Could not create directory {}: {}",
+ root_path.display(), e)[]),
_ => (),
}
Ok(f) => box f,
Err(e) => {
let disp = root_path.display();
- sess.fatal(format!("Could not open {}: {}", disp, e).index(&FullRange));
+ sess.fatal(&format!("Could not open {}: {}", disp, e)[]);
}
};
root_path.pop();
cur_scope: 0
};
- visitor.dump_crate_info(cratename.index(&FullRange), krate);
+ visitor.dump_crate_info(&cratename[], krate);
visit::walk_crate(&mut visitor, krate);
}
assert!(self.dump_spans);
let result = format!("span,kind,{},{},text,\"{}\"\n",
kind, su.extent_str(span), escape(su.snippet(span)));
- self.record(result.index(&FullRange));
+ self.record(&result[]);
}
}
values: Vec<String>,
span: Span) -> Option<String> {
if values.len() != fields.len() {
- self.span.sess.span_bug(span, format!(
+ self.span.sess.span_bug(span, &format!(
"Mismatch between length of fields for '{}', expected '{}', found '{}'",
- kind, fields.len(), values.len()).index(&FullRange));
+ kind, fields.len(), values.len())[]);
}
let values = values.iter().map(|s| {
// Never take more than 1020 chars
if s.len() > 1020 {
- s.index(&(0..1020))
+ &s[0..1020]
} else {
- s.index(&FullRange)
+ &s[]
}
});
}
)));
Some(strs.fold(String::new(), |mut s, ss| {
- s.push_str(ss.index(&FullRange));
+ s.push_str(&ss[]);
s
}))
}
let (label, ref fields, needs_span, dump_spans) = FmtStrs::lookup_row(kind);
if needs_span {
- self.span.sess.span_bug(span, format!(
+ self.span.sess.span_bug(span, &format!(
"Called record_without_span for '{}' which does requires a span",
- label).index(&FullRange));
+ label)[]);
}
assert!(!dump_spans);
};
let mut result = String::from_str(label);
- result.push_str(values_str.index(&FullRange));
+ result.push_str(&values_str[]);
result.push_str("\n");
- self.recorder.record(result.index(&FullRange));
+ self.recorder.record(&result[]);
}
pub fn record_with_span(&mut self,
None => return,
};
let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str);
- self.recorder.record(result.index(&FullRange));
+ self.recorder.record(&result[]);
}
pub fn check_and_record(&mut self,
// variable def's node id
let mut qualname = String::from_str(name);
qualname.push_str("$");
- qualname.push_str(id.to_string().index(&FullRange));
+ qualname.push_str(&id.to_string()[]);
self.check_and_record(Variable,
span,
sub_span,
if bracket_count != 0 {
let loc = self.sess.codemap().lookup_char_pos(span.lo);
self.sess.span_bug(span,
- format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
- self.snippet(span), loc.file.name, loc.line).index(&FullRange));
+ &format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
+ self.snippet(span), loc.file.name, loc.line)[]);
}
if result.is_none() && prev.tok.is_ident() && bracket_count == 0 {
return self.make_sub_span(span, Some(prev.sp));
if ts.tok == token::Eof {
if bracket_count != 0 {
let loc = self.sess.codemap().lookup_char_pos(span.lo);
- self.sess.span_bug(span, format!(
+ self.sess.span_bug(span, &format!(
"Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
- self.snippet(span), loc.file.name, loc.line).index(&FullRange));
+ self.snippet(span), loc.file.name, loc.line)[]);
}
return result
}
let _indenter = indenter();
m.iter().filter_map(|br| {
- e(br.pats.index(&FullRange)).map(|pats| {
+ e(&br.pats[]).map(|pats| {
let this = br.pats[col];
let mut bound_ptrs = br.bound_ptrs.clone();
match this.node {
// Collect all of the matches that can match against anything.
enter_match(bcx, dm, m, col, val, |pats| {
if pat_is_binding_or_wild(dm, &*pats[col]) {
- let mut r = pats.index(&(0..col)).to_vec();
- r.push_all(pats.index(&((col + 1)..)));
+ let mut r = pats[0..col].to_vec();
+ r.push_all(&pats[(col + 1)..]);
Some(r)
} else {
None
param_env: param_env,
};
enter_match(bcx, dm, m, col, val, |pats|
- check_match::specialize(&mcx, pats.index(&FullRange), &ctor, col, variant_size)
+ check_match::specialize(&mcx, &pats[], &ctor, col, variant_size)
)
}
-> Result<'blk, 'tcx> {
let did = langcall(cx,
None,
- format!("comparison of `{}`",
- cx.ty_to_string(rhs_t)).index(&FullRange),
+ &format!("comparison of `{}`",
+ cx.ty_to_string(rhs_t))[],
StrEqFnLangItem);
callee::trans_lang_call(cx, did, &[lhs, rhs], None)
}
if has_nested_bindings(m, col) {
let expanded = expand_nested_bindings(bcx, m, col, val);
compile_submatch_continue(bcx,
- expanded.index(&FullRange),
+ &expanded[],
vals,
chk,
col,
bcx = compile_guard(bcx,
&**guard_expr,
m[0].data,
- m.index(&(1..m.len())),
+ &m[1..m.len()],
vals,
chk,
has_genuine_default);
let tcx = bcx.tcx();
let dm = &tcx.def_map;
- let mut vals_left = vals.index(&(0u..col)).to_vec();
- vals_left.push_all(vals.index(&((col + 1u)..)));
+ let mut vals_left = vals[0u..col].to_vec();
+ vals_left.push_all(&vals[(col + 1u)..]);
let ccx = bcx.fcx.ccx;
// Find a real id (we're adding placeholder wildcard patterns, but
}
let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val);
let mut opt_vals = unpacked;
- opt_vals.push_all(vals_left.index(&FullRange));
+ opt_vals.push_all(&vals_left[]);
compile_submatch(opt_cx,
- opt_ms.index(&FullRange),
- opt_vals.index(&FullRange),
+ &opt_ms[],
+ &opt_vals[],
branch_chk.as_ref().unwrap_or(chk),
has_genuine_default);
}
}
_ => {
compile_submatch(else_cx,
- defaults.index(&FullRange),
- vals_left.index(&FullRange),
+ &defaults[],
+ &vals_left[],
chk,
has_genuine_default);
}
"__llmatch");
trmode = TrByCopy(alloca_no_lifetime(bcx,
llvariable_ty,
- bcx.ident(ident).index(&FullRange)));
+ &bcx.ident(ident)[]));
}
ast::BindByValue(_) => {
// in this case, the final type of the variable will be T,
// above
llmatch = alloca_no_lifetime(bcx,
llvariable_ty.ptr_to(),
- bcx.ident(ident).index(&FullRange));
+ &bcx.ident(ident)[]);
trmode = TrByMove;
}
ast::BindByRef(_) => {
llmatch = alloca_no_lifetime(bcx,
llvariable_ty,
- bcx.ident(ident).index(&FullRange));
+ &bcx.ident(ident)[]);
trmode = TrByRef;
}
};
&& arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle)
});
- compile_submatch(bcx, matches.index(&FullRange), &[discr_datum.val], &chk, has_default);
+ compile_submatch(bcx, &matches[], &[discr_datum.val], &chk, has_default);
let mut arm_cxs = Vec::new();
for arm_data in arm_datas.iter() {
arm_cxs.push(bcx);
}
- bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs.index(&FullRange));
+ bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[]);
return bcx;
}
let var_ty = node_id_type(bcx, p_id);
// Allocate memory on stack for the binding.
- let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident).index(&FullRange));
+ let llval = alloc_ty(bcx, var_ty, &bcx.ident(*ident)[]);
// Subtle: be sure that we *populate* the memory *before*
// we schedule the cleanup.
pat.repr(bcx.tcx()));
if bcx.sess().asm_comments() {
- add_comment(bcx, format!("bind_irrefutable_pat(pat={})",
- pat.repr(bcx.tcx())).index(&FullRange));
+ add_comment(bcx, &format!("bind_irrefutable_pat(pat={})",
+ pat.repr(bcx.tcx()))[]);
}
let _indenter = indenter();
t: Ty<'tcx>) -> Repr<'tcx> {
match t.sty {
ty::ty_tup(ref elems) => {
- Univariant(mk_struct(cx, elems.index(&FullRange), false, t), false)
+ Univariant(mk_struct(cx, &elems[], false, t), false)
}
ty::ty_struct(def_id, substs) => {
let fields = ty::lookup_struct_fields(cx.tcx(), def_id);
let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
if dtor { ftys.push(cx.tcx().types.bool); }
- Univariant(mk_struct(cx, ftys.index(&FullRange), packed, t), dtor)
+ Univariant(mk_struct(cx, &ftys[], packed, t), dtor)
}
ty::ty_unboxed_closure(def_id, _, substs) => {
let typer = NormalizingUnboxedClosureTyper::new(cx.tcx());
let upvars = typer.unboxed_closure_upvars(def_id, substs).unwrap();
let upvar_types = upvars.iter().map(|u| u.ty).collect::<Vec<_>>();
- Univariant(mk_struct(cx, upvar_types.index(&FullRange), false, t), false)
+ Univariant(mk_struct(cx, &upvar_types[], false, t), false)
}
ty::ty_enum(def_id, substs) => {
let cases = get_cases(cx.tcx(), def_id, substs);
- let hint = *ty::lookup_repr_hints(cx.tcx(), def_id).index(&FullRange).get(0)
+ let hint = *ty::lookup_repr_hints(cx.tcx(), def_id)[].get(0)
.unwrap_or(&attr::ReprAny);
let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
// (Typechecking will reject discriminant-sizing attrs.)
assert_eq!(hint, attr::ReprAny);
let ftys = if dtor { vec!(cx.tcx().types.bool) } else { vec!() };
- return Univariant(mk_struct(cx, ftys.index(&FullRange), false, t),
+ return Univariant(mk_struct(cx, &ftys[], false, t),
dtor);
}
// non-empty body, explicit discriminants should have
// been rejected by a checker before this point.
if !cases.iter().enumerate().all(|(i,c)| c.discr == (i as Disr)) {
- cx.sess().bug(format!("non-C-like enum {} with specified \
+ cx.sess().bug(&format!("non-C-like enum {} with specified \
discriminants",
ty::item_path_str(cx.tcx(),
- def_id)).index(&FullRange));
+ def_id))[]);
}
if cases.len() == 1 {
assert_eq!(hint, attr::ReprAny);
let mut ftys = cases[0].tys.clone();
if dtor { ftys.push(cx.tcx().types.bool); }
- return Univariant(mk_struct(cx, ftys.index(&FullRange), false, t),
+ return Univariant(mk_struct(cx, &ftys[], false, t),
dtor);
}
let mut discr = 0;
while discr < 2 {
if cases[1 - discr].is_zerolen(cx, t) {
- let st = mk_struct(cx, cases[discr].tys.index(&FullRange),
+ let st = mk_struct(cx, &cases[discr].tys[],
false, t);
match cases[discr].find_ptr(cx) {
Some(ref df) if df.len() == 1 && st.fields.len() == 1 => {
let fields : Vec<_> = cases.iter().map(|c| {
let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity));
- ftys.push_all(c.tys.index(&FullRange));
+ ftys.push_all(&c.tys[]);
if dtor { ftys.push(cx.tcx().types.bool); }
- mk_struct(cx, ftys.index(&FullRange), false, t)
+ mk_struct(cx, &ftys[], false, t)
}).collect();
- ensure_enum_fits_in_address_space(cx, ity, fields.index(&FullRange), t);
+ ensure_enum_fits_in_address_space(cx, ity, &fields[], t);
General(ity, fields, dtor)
}
- _ => cx.sess().bug(format!("adt::represent_type called on non-ADT type: {}",
- ty_to_string(cx.tcx(), t)).index(&FullRange))
+ _ => cx.sess().bug(&format!("adt::represent_type called on non-ADT type: {}",
+ ty_to_string(cx.tcx(), t))[])
}
}
impl<'tcx> Case<'tcx> {
fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>) -> bool {
- mk_struct(cx, self.tys.index(&FullRange), false, scapegoat).size == 0
+ mk_struct(cx, &self.tys[], false, scapegoat).size == 0
}
fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option<DiscrField> {
.map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
};
- ensure_struct_fits_in_address_space(cx, lltys.index(&FullRange), packed, scapegoat);
+ ensure_struct_fits_in_address_space(cx, &lltys[], packed, scapegoat);
- let llty_rec = Type::struct_(cx, lltys.index(&FullRange), packed);
+ let llty_rec = Type::struct_(cx, &lltys[], packed);
Struct {
size: machine::llsize_of_alloc(cx, llty_rec),
align: machine::llalign_of_min(cx, llty_rec),
return ity;
}
attr::ReprExtern => {
- attempts = match cx.sess().target.target.arch.index(&FullRange) {
+ attempts = match &cx.sess().target.target.arch[] {
// WARNING: the ARM EABI has two variants; the one corresponding to `at_least_32`
// appears to be used on Linux and NetBSD, but some systems may use the variant
// corresponding to `choose_shortest`. However, we don't run on those yet...?
match *r {
CEnum(..) | General(..) | RawNullablePointer { .. } => { }
Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } =>
- llty.set_struct_body(struct_llfields(cx, st, false, false).index(&FullRange),
+ llty.set_struct_body(&struct_llfields(cx, st, false, false)[],
st.packed)
}
}
Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } => {
match name {
None => {
- Type::struct_(cx, struct_llfields(cx, st, sizing, dst).index(&FullRange),
+ Type::struct_(cx, &struct_llfields(cx, st, sizing, dst)[],
st.packed)
}
Some(name) => { assert_eq!(sizing, false); Type::named_struct(cx, name) }
// of the size.
//
// FIXME #10604: this breaks when vector types are present.
- let (size, align) = union_size_and_align(sts.index(&FullRange));
+ let (size, align) = union_size_and_align(&sts[]);
let align_s = align as u64;
let discr_ty = ll_inttype(cx, ity);
let discr_size = machine::llsize_of_alloc(cx, discr_ty);
Type::array(&discr_ty, align_s / discr_size - 1),
fill_ty];
match name {
- None => Type::struct_(cx, fields.index(&FullRange), false),
+ None => Type::struct_(cx, &fields[], false),
Some(name) => {
let mut llty = Type::named_struct(cx, name);
- llty.set_struct_body(fields.index(&FullRange), false);
+ llty.set_struct_body(&fields[], false);
llty
}
}
fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField,
scrutinee: ValueRef) -> ValueRef {
- let llptrptr = GEPi(bcx, scrutinee, discrfield.index(&FullRange));
+ let llptrptr = GEPi(bcx, scrutinee, &discrfield[]);
let llptr = Load(bcx, llptrptr);
let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
ICmp(bcx, cmp, llptr, C_null(val_ty(llptr)))
}
StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
if discr != nndiscr {
- let llptrptr = GEPi(bcx, val, discrfield.index(&FullRange));
+ let llptrptr = GEPi(bcx, val, &discrfield[]);
let llptrty = val_ty(llptrptr).element_type();
Store(bcx, C_null(llptrty), llptrptr)
}
let val = if needs_cast {
let ccx = bcx.ccx();
let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::<Vec<_>>();
- let real_ty = Type::struct_(ccx, fields.index(&FullRange), st.packed);
+ let real_ty = Type::struct_(ccx, &fields[], st.packed);
PointerCast(bcx, val, real_ty.ptr_to())
} else {
val
for (discr, case) in cases.iter().enumerate() {
let mut variant_cx = fcx.new_temp_block(
- format!("enum-variant-iter-{}", discr.to_string()).index(&FullRange)
+ &format!("enum-variant-iter-{}", &discr.to_string())[]
);
let rhs_val = C_integral(ll_inttype(ccx, ity), discr as u64, true);
AddCase(llswitch, rhs_val, variant_cx.llbb);
let fields = case.fields.iter().map(|&ty|
type_of::type_of(bcx.ccx(), ty)).collect::<Vec<_>>();
- let real_ty = Type::struct_(ccx, fields.index(&FullRange), case.packed);
+ let real_ty = Type::struct_(ccx, &fields[], case.packed);
let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to());
variant_cx = f(variant_cx, case, variant_value);
let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true);
let mut f = vec![lldiscr];
f.push_all(vals);
- let mut contents = build_const_struct(ccx, case, f.index(&FullRange));
+ let mut contents = build_const_struct(ccx, case, &f[]);
contents.push_all(&[padding(ccx, max_sz - case.size)]);
- C_struct(ccx, contents.index(&FullRange), false)
+ C_struct(ccx, &contents[], false)
}
Univariant(ref st, _dro) => {
assert!(discr == 0);
let contents = build_const_struct(ccx, st, vals);
- C_struct(ccx, contents.index(&FullRange), st.packed)
+ C_struct(ccx, &contents[], st.packed)
}
RawNullablePointer { nndiscr, nnty, .. } => {
if discr == nndiscr {
}
StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => {
if discr == nndiscr {
- C_struct(ccx, build_const_struct(ccx,
+ C_struct(ccx, &build_const_struct(ccx,
nonnull,
- vals).index(&FullRange),
+ vals)[],
false)
} else {
let vals = nonnull.fields.iter().map(|&ty| {
// field; see #8506.
C_null(type_of::sizing_type_of(ccx, ty))
}).collect::<Vec<ValueRef>>();
- C_struct(ccx, build_const_struct(ccx,
+ C_struct(ccx, &build_const_struct(ccx,
nonnull,
- vals.index(&FullRange)).index(&FullRange),
+ &vals[])[],
false)
}
}
callee::DontAutorefArg)
})
}).collect::<Vec<_>>();
- inputs.push_all(ext_inputs.index(&FullRange));
+ inputs.push_all(&ext_inputs[]);
// no failure occurred preparing operands, no need to cleanup
fcx.pop_custom_cleanup_scope(temp_scope);
if !clobbers.is_empty() {
clobbers.push(',');
}
- clobbers.push_str(more_clobbers.index(&FullRange));
+ clobbers.push_str(&more_clobbers[]);
}
// Add the clobbers to our constraints list
if clobbers.len() != 0 && constraints.len() != 0 {
constraints.push(',');
- constraints.push_str(clobbers.index(&FullRange));
+ constraints.push_str(&clobbers[]);
} else {
- constraints.push_str(clobbers.index(&FullRange));
+ constraints.push_str(&clobbers[]);
}
- debug!("Asm Constraints: {}", constraints.index(&FullRange));
+ debug!("Asm Constraints: {}", &constraints[]);
let num_outputs = outputs.len();
} else if num_outputs == 1 {
output_types[0]
} else {
- Type::struct_(bcx.ccx(), output_types.index(&FullRange), false)
+ Type::struct_(bcx.ccx(), &output_types[], false)
};
let dialect = match ia.dialect {
let f = decl_rust_fn(ccx, fn_ty, name);
csearch::get_item_attrs(&ccx.sess().cstore, did, |attrs| {
- set_llvm_fn_attrs(ccx, attrs.index(&FullRange), f)
+ set_llvm_fn_attrs(ccx, &attrs[], f)
});
ccx.externs().borrow_mut().insert(name.to_string(), f);
match bcx.tcx().lang_items.require(it) {
Ok(id) => id,
Err(s) => {
- bcx.sess().fatal(format!("allocation of `{}` {}",
+ bcx.sess().fatal(&format!("allocation of `{}` {}",
bcx.ty_to_string(info_ty),
- s).index(&FullRange));
+ s)[]);
}
}
}
// silently mangles such symbols, breaking our linkage model.
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: String) {
if ccx.all_llvm_symbols().borrow().contains(&sym) {
- ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym).index(&FullRange));
+ ccx.sess().bug(&format!("duplicate LLVM symbol: {}", sym)[]);
}
ccx.all_llvm_symbols().borrow_mut().insert(sym);
}
ty::mk_nil(ccx.tcx()));
get_extern_fn(ccx,
&mut *ccx.externs().borrow_mut(),
- name.index(&FullRange),
+ &name[],
llvm::CCallConv,
llty,
dtor_ty)
for variant in (*variants).iter() {
let variant_cx =
fcx.new_temp_block(
- format!("enum-iter-variant-{}",
- variant.disr_val.to_string().index(&FullRange))
- .index(&FullRange));
+ &format!("enum-iter-variant-{}",
+ &variant.disr_val.to_string()[])
+ []);
match adt::trans_case(cx, &*repr, variant.disr_val) {
_match::SingleResult(r) => {
AddCase(llswitch, r.val, variant_cx.llbb)
}
}
_ => {
- cx.sess().unimpl(format!("type in iter_structural_ty: {}",
- ty_to_string(cx.tcx(), t)).index(&FullRange))
+ cx.sess().unimpl(&format!("type in iter_structural_ty: {}",
+ ty_to_string(cx.tcx(), t))[])
}
}
return cx;
(ICmp(cx, llvm::IntEQ, rhs, zero), false)
}
_ => {
- cx.sess().bug(format!("fail-if-zero on unexpected type: {}",
- ty_to_string(cx.tcx(), rhs_t)).index(&FullRange));
+ cx.sess().bug(&format!("fail-if-zero on unexpected type: {}",
+ ty_to_string(cx.tcx(), rhs_t))[]);
}
};
let bcx = with_cond(cx, is_zero, |bcx| {
ty::ty_bare_fn(_, ref fn_ty) => {
match ccx.sess().target.target.adjust_abi(fn_ty.abi) {
Rust | RustCall => {
- get_extern_rust_fn(ccx, t, name.index(&FullRange), did)
+ get_extern_rust_fn(ccx, t, &name[], did)
}
RustIntrinsic => {
ccx.sess().bug("unexpected intrinsic in trans_external_path")
}
_ => {
foreign::register_foreign_item_fn(ccx, fn_ty.abi, t,
- name.index(&FullRange))
+ &name[])
}
}
}
let llresult = Invoke(bcx,
llfn,
- llargs.index(&FullRange),
+ &llargs[],
normal_bcx.llbb,
landing_pad,
Some(attributes));
None => debuginfo::clear_source_location(bcx.fcx)
};
- let llresult = Call(bcx, llfn, llargs.index(&FullRange), Some(attributes));
+ let llresult = Call(bcx, llfn, &llargs[], Some(attributes));
return (llresult, bcx);
}
}
pub fn call_memcpy(cx: Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, align: u32) {
let _icx = push_ctxt("call_memcpy");
let ccx = cx.ccx();
- let key = match ccx.sess().target.target.target_word_size.index(&FullRange) {
+ let key = match &ccx.sess().target.target.target_word_size[] {
"32" => "llvm.memcpy.p0i8.p0i8.i32",
"64" => "llvm.memcpy.p0i8.p0i8.i64",
tws => panic!("Unsupported target word size for memcpy: {}", tws),
let llty = type_of::type_of(ccx, ty);
- let intrinsic_key = match ccx.sess().target.target.target_word_size.index(&FullRange) {
+ let intrinsic_key = match &ccx.sess().target.target.target_word_size[] {
"32" => "llvm.memset.p0i8.i32",
"64" => "llvm.memset.p0i8.i64",
tws => panic!("Unsupported target word size for memset: {}", tws),
"argtuple",
arg_scope_id));
let untupled_arg_types = match monomorphized_arg_types[0].sty {
- ty::ty_tup(ref types) => types.index(&FullRange),
+ ty::ty_tup(ref types) => &types[],
_ => {
bcx.tcx().sess.span_bug(args[0].pat.span,
"first arg to `rust-call` ABI function \
let arg_datums = if abi != RustCall {
create_datums_for_fn_args(&fcx,
- monomorphized_arg_types.index(&FullRange))
+ &monomorphized_arg_types[])
} else {
create_datums_for_fn_args_under_call_abi(
bcx,
arg_scope,
- monomorphized_arg_types.index(&FullRange))
+ &monomorphized_arg_types[])
};
bcx = match closure_env.kind {
copy_args_to_allocas(&fcx,
arg_scope,
bcx,
- decl.inputs.index(&FullRange),
+ &decl.inputs[],
arg_datums)
}
closure::UnboxedClosure(..) => {
copy_unboxed_closure_args_to_allocas(
bcx,
arg_scope,
- decl.inputs.index(&FullRange),
+ &decl.inputs[],
arg_datums,
- monomorphized_arg_types.index(&FullRange))
+ &monomorphized_arg_types[])
}
};
ty::erase_late_bound_regions(bcx.tcx(), &bft.sig.output()).unwrap()
}
_ => ccx.sess().bug(
- format!("trans_enum_variant_constructor: \
+ &format!("trans_enum_variant_constructor: \
unexpected ctor return type {}",
- ctor_ty.repr(tcx)).index(&FullRange))
+ ctor_ty.repr(tcx))[])
};
// Get location to store the result. If the user does not care about
bcx = expr::trans_adt(bcx,
result_ty,
disr,
- fields.index(&FullRange),
+ &fields[],
None,
expr::SaveIn(llresult),
call_info);
ty::erase_late_bound_regions(ccx.tcx(), &bft.sig.output())
}
_ => ccx.sess().bug(
- format!("trans_enum_variant_or_tuple_like_struct: \
+ &format!("trans_enum_variant_or_tuple_like_struct: \
unexpected ctor return type {}",
- ty_to_string(ccx.tcx(), ctor_ty)).index(&FullRange))
+ ty_to_string(ccx.tcx(), ctor_ty))[])
};
let arena = TypedArena::new();
ty::erase_late_bound_regions(
ccx.tcx(), &ty::ty_fn_args(ctor_ty));
- let arg_datums = create_datums_for_fn_args(&fcx, arg_tys.index(&FullRange));
+ let arg_datums = create_datums_for_fn_args(&fcx, &arg_tys[]);
if !type_is_zero_size(fcx.ccx, result_ty.unwrap()) {
let dest = fcx.get_ret_slot(bcx, result_ty, "eret_slot");
// pass for the latter already ran.
lint::raw_emit_lint(&ccx.tcx().sess, lint::builtin::VARIANT_SIZE_DIFFERENCES,
*lvlsrc.unwrap(), Some(sp),
- format!("enum variant is more than three times larger \
+ &format!("enum variant is more than three times larger \
({} bytes) than the next largest (ignoring padding)",
- largest).index(&FullRange));
+ largest)[]);
ccx.sess().span_note(enum_def.variants[largest_index].span,
"this variant is the largest");
match item.node {
ast::ItemFn(ref decl, _fn_style, abi, ref generics, ref body) => {
if !generics.is_type_parameterized() {
- let trans_everywhere = attr::requests_inline(item.attrs.index(&FullRange));
+ let trans_everywhere = attr::requests_inline(&item.attrs[]);
// Ignore `trans_everywhere` for cross-crate inlined items
// (`from_external`). `trans_item` will be called once for each
// compilation unit that references the item, so it will still get
foreign::trans_rust_fn_with_foreign_abi(ccx,
&**decl,
&**body,
- item.attrs.index(&FullRange),
+ &item.attrs[],
llfn,
&Substs::trans_empty(),
item.id,
llfn,
&Substs::trans_empty(),
item.id,
- item.attrs.index(&FullRange));
+ &item.attrs[]);
}
update_linkage(ccx,
llfn,
ast::ItemImpl(_, _, ref generics, _, _, ref impl_items) => {
meth::trans_impl(ccx,
item.ident,
- impl_items.index(&FullRange),
+ &impl_items[],
generics,
item.id);
}
// Do static_assert checking. It can't really be done much earlier
// because we need to get the value of the bool out of LLVM
- if attr::contains_name(item.attrs.index(&FullRange), "static_assert") {
+ if attr::contains_name(&item.attrs[], "static_assert") {
if m == ast::MutMutable {
ccx.sess().span_fatal(expr.span,
"cannot have static_assert on a mutable \
_ => panic!("expected bare rust fn")
};
- let llfn = decl_rust_fn(ccx, node_type, sym.index(&FullRange));
+ let llfn = decl_rust_fn(ccx, node_type, &sym[]);
finish_register_fn(ccx, sp, sym, node_id, llfn);
llfn
}
match fn_sig.inputs[1].sty {
ty::ty_tup(ref t_in) => {
- inputs.push_all(t_in.index(&FullRange));
+ inputs.push_all(&t_in[]);
inputs
}
_ => ccx.sess().bug("expected tuple'd inputs")
debug!("register_fn_llvmty id={} sym={}", node_id, sym);
let llfn = decl_fn(ccx,
- sym.index(&FullRange),
+ &sym[],
cc,
llfty,
ty::FnConverging(ty::mk_nil(ccx.tcx())));
let (start_fn, args) = if use_start_lang_item {
let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) {
Ok(id) => id,
- Err(s) => { ccx.sess().fatal(s.index(&FullRange)); }
+ Err(s) => { ccx.sess().fatal(&s[]); }
};
let start_fn = if start_def_id.krate == ast::LOCAL_CRATE {
get_item_val(ccx, start_def_id.node)
let val = match item {
ast_map::NodeItem(i) => {
let ty = ty::node_id_to_type(ccx.tcx(), i.id);
- let sym = |&:| exported_name(ccx, id, ty, i.attrs.index(&FullRange));
+ let sym = |&:| exported_name(ccx, id, ty, &i.attrs[]);
let v = match i.node {
ast::ItemStatic(_, _, ref expr) => {
} else {
llvm::LLVMTypeOf(v)
};
- if contains_null(sym.index(&FullRange)) {
+ if contains_null(&sym[]) {
ccx.sess().fatal(
- format!("Illegal null byte in export_name \
- value: `{}`", sym).index(&FullRange));
+ &format!("Illegal null byte in export_name \
+ value: `{}`", sym)[]);
}
let buf = CString::from_slice(sym.as_bytes());
let g = llvm::LLVMAddGlobal(ccx.llmod(), llty,
buf.as_ptr());
- if attr::contains_name(i.attrs.index(&FullRange),
+ if attr::contains_name(&i.attrs[],
"thread_local") {
llvm::set_thread_local(g, true);
}
sym,
i.id)
};
- set_llvm_fn_attrs(ccx, i.attrs.index(&FullRange), llfn);
+ set_llvm_fn_attrs(ccx, &i.attrs[], llfn);
llfn
}
_ => panic!("get_item_val: weird result in table")
};
- match attr::first_attr_value_str_by_name(i.attrs.index(&FullRange),
+ match attr::first_attr_value_str_by_name(&i.attrs[],
"link_section") {
Some(sect) => {
if contains_null(sect.get()) {
- ccx.sess().fatal(format!("Illegal null byte in link_section value: `{}`",
- sect.get()).index(&FullRange));
+ ccx.sess().fatal(&format!("Illegal null byte in link_section value: `{}`",
+ sect.get())[]);
}
unsafe {
let buf = CString::from_slice(sect.get().as_bytes());
let abi = ccx.tcx().map.get_foreign_abi(id);
let ty = ty::node_id_to_type(ccx.tcx(), ni.id);
let name = foreign::link_name(&*ni);
- foreign::register_foreign_item_fn(ccx, abi, ty, name.get().index(&FullRange))
+ foreign::register_foreign_item_fn(ccx, abi, ty, &name.get()[])
}
ast::ForeignItemStatic(..) => {
foreign::register_static(ccx, &*ni)
let sym = exported_name(ccx,
id,
ty,
- enm.attrs.index(&FullRange));
+ &enm.attrs[]);
llfn = match enm.node {
ast::ItemEnum(_, _) => {
let sym = exported_name(ccx,
id,
ty,
- struct_item.attrs
- .index(&FullRange));
+ &struct_item.attrs[]);
let llfn = register_fn(ccx, struct_item.span,
sym, ctor_id, ty);
set_inline_hint(llfn);
}
ref variant => {
- ccx.sess().bug(format!("get_item_val(): unexpected variant: {:?}",
- variant).index(&FullRange))
+ ccx.sess().bug(&format!("get_item_val(): unexpected variant: {:?}",
+ variant)[])
}
};
m: &ast::Method) -> ValueRef {
let mty = ty::node_id_to_type(ccx.tcx(), id);
- let sym = exported_name(ccx, id, mty, m.attrs.index(&FullRange));
+ let sym = exported_name(ccx, id, mty, &m.attrs[]);
let llfn = register_fn(ccx, m.span, sym, id, mty);
- set_llvm_fn_attrs(ccx, m.attrs.index(&FullRange), llfn);
+ set_llvm_fn_attrs(ccx, &m.attrs[], llfn);
llfn
}
Some(compressed) => compressed,
None => cx.sess().fatal("failed to compress metadata"),
}.as_slice());
- let llmeta = C_bytes_in_context(cx.metadata_llcx(), compressed.index(&FullRange));
+ let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[]);
let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false);
let name = format!("rust_metadata_{}_{}",
cx.link_meta().crate_name,
let link_meta = link::build_link_meta(&tcx.sess, krate, name);
let codegen_units = tcx.sess.opts.cg.codegen_units;
- let shared_ccx = SharedCrateContext::new(link_meta.crate_name.index(&FullRange),
+ let shared_ccx = SharedCrateContext::new(&link_meta.crate_name[],
codegen_units,
tcx,
export_map,
llmod: shared_ccx.metadata_llmod(),
};
let formats = shared_ccx.tcx().dependency_formats.borrow().clone();
- let no_builtins = attr::contains_name(krate.attrs.index(&FullRange), "no_builtins");
+ let no_builtins = attr::contains_name(&krate.attrs[], "no_builtins");
let translation = CrateTranslation {
modules: modules,
for (small_vec_e, &ix) in small_vec.iter_mut().zip(ixs.iter()) {
*small_vec_e = C_i32(self.ccx, ix as i32);
}
- self.inbounds_gep(base, small_vec.index(&(0..ixs.len())))
+ self.inbounds_gep(base, &small_vec[0..ixs.len()])
} else {
let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
self.count_insn("gepi");
- self.inbounds_gep(base, v.index(&FullRange))
+ self.inbounds_gep(base, &v[])
}
}
let s = format!("{} ({})",
text,
self.ccx.sess().codemap().span_to_string(sp));
- debug!("{}", s.index(&FullRange));
- self.add_comment(s.index(&FullRange));
+ debug!("{}", &s[]);
+ self.add_comment(&s[]);
}
}
}).collect::<Vec<_>>();
debug!("Asm Output Type: {}", self.ccx.tn().type_to_string(output));
- let fty = Type::func(argtys.index(&FullRange), &output);
+ let fty = Type::func(&argtys[], &output);
unsafe {
let v = llvm::LLVMInlineAsm(
fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
atys: &[Type],
rty: Type,
ret_def: bool) -> FnType {
- match ccx.sess().target.target.arch.index(&FullRange) {
+ match &ccx.sess().target.target.arch[] {
"x86" => cabi_x86::compute_abi_info(ccx, atys, rty, ret_def),
"x86_64" => if ccx.sess().target.target.options.is_like_windows {
cabi_x86_win64::compute_abi_info(ccx, atys, rty, ret_def)
"arm" => cabi_arm::compute_abi_info(ccx, atys, rty, ret_def),
"aarch64" => cabi_aarch64::compute_abi_info(ccx, atys, rty, ret_def),
"mips" => cabi_mips::compute_abi_info(ccx, atys, rty, ret_def),
- a => ccx.sess().fatal((format!("unrecognized arch \"{}\" in target specification", a))
- .index(&FullRange)),
+ a => ccx.sess().fatal(&format!("unrecognized arch \"{}\" in target specification", a)
+ []),
}
}
tys.push(Type::i64(ccx));
}
SSEFv => {
- let vec_len = llvec_len(cls.index(&((i + 1u)..)));
+ let vec_len = llvec_len(&cls[(i + 1u)..]);
let vec_ty = Type::vector(&Type::f32(ccx), (vec_len * 2u) as u64);
tys.push(vec_ty);
i += vec_len;
_ => {
bcx.tcx().sess.span_bug(
expr.span,
- format!("type of callee is neither bare-fn nor closure: \
+ &format!("type of callee is neither bare-fn nor closure: \
{}",
- bcx.ty_to_string(datum.ty)).index(&FullRange));
+ bcx.ty_to_string(datum.ty))[]);
}
}
}
def::DefSelfTy(..) | def::DefAssociatedPath(..) => {
bcx.tcx().sess.span_bug(
ref_expr.span,
- format!("cannot translate def {:?} \
- to a callable thing!", def).index(&FullRange));
+ &format!("cannot translate def {:?} \
+ to a callable thing!", def)[]);
}
}
}
}
_ => {
- tcx.sess.bug(format!("trans_fn_pointer_shim invoked on invalid type: {}",
- bare_fn_ty.repr(tcx)).index(&FullRange));
+ tcx.sess.bug(&format!("trans_fn_pointer_shim invoked on invalid type: {}",
+ bare_fn_ty.repr(tcx))[]);
}
};
let sig = ty::erase_late_bound_regions(tcx, sig);
let llfn =
decl_internal_rust_fn(ccx,
tuple_fn_ty,
- function_name.index(&FullRange));
+ &function_name[]);
//
let block_arena = TypedArena::new();
None,
bare_fn_ty,
|bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) },
- ArgVals(llargs.index(&FullRange)),
+ ArgVals(&llargs[]),
dest).bcx;
finish_fn(&fcx, bcx, sig.output);
// Invoke the actual rust fn and update bcx/llresult.
let (llret, b) = base::invoke(bcx,
llfn,
- llargs.index(&FullRange),
+ &llargs[],
callee_ty,
call_info);
bcx = b;
bcx = foreign::trans_native_call(bcx, callee_ty,
llfn, opt_llretslot.unwrap(),
- llargs.index(&FullRange), arg_tys);
+ &llargs[], arg_tys);
}
fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope);
}
self.ccx.sess().bug(
- format!("no cleanup scope {} found",
- self.ccx.tcx().map.node_to_string(cleanup_scope)).index(&FullRange));
+ &format!("no cleanup scope {} found",
+ self.ccx.tcx().map.node_to_string(cleanup_scope))[]);
}
/// Schedules a cleanup to occur in the top-most scope, which must be a temporary scope.
}
LoopExit(id, _) => {
- self.ccx.sess().bug(format!(
+ self.ccx.sess().bug(&format!(
"cannot exit from scope {}, \
- not in scope", id).index(&FullRange));
+ not in scope", id)[]);
}
}
}
let name = scope.block_name("clean");
debug!("generating cleanups for {}", name);
let bcx_in = self.new_block(label.is_unwind(),
- name.index(&FullRange),
+ &name[],
None);
let mut bcx_out = bcx_in;
for cleanup in scope.cleanups.iter().rev() {
Some(llbb) => { return llbb; }
None => {
let name = last_scope.block_name("unwind");
- pad_bcx = self.new_block(true, name.index(&FullRange), None);
+ pad_bcx = self.new_block(true, &name[], None);
last_scope.cached_landing_pad = Some(pad_bcx.llbb);
}
}
r
}
None => {
- tcx.sess.bug(format!("no temporary scope available for expr {}",
- id).index(&FullRange))
+ tcx.sess.bug(&format!("no temporary scope available for expr {}",
+ id)[])
}
}
}
let tcx = ccx.tcx();
// compute the type of the closure
- let cdata_ty = mk_closure_tys(tcx, bound_values.index(&FullRange));
+ let cdata_ty = mk_closure_tys(tcx, &bound_values[]);
// cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a
// tuple. This could be a ptr in uniq or a box or on stack,
debug!("Copy {} into closure", bv.to_string(ccx));
if ccx.sess().asm_comments() {
- add_comment(bcx, format!("Copy {} into closure",
- bv.to_string(ccx)).index(&FullRange));
+ add_comment(bcx, &format!("Copy {} into closure",
+ bv.to_string(ccx))[]);
}
let bound_data = GEPi(bcx, llbox, &[0u, abi::BOX_FIELD_BODY, i]);
let s = tcx.map.with_path(id, |path| {
mangle_internal_name_by_path_and_seq(path, "closure")
});
- let llfn = decl_internal_rust_fn(ccx, fty, s.index(&FullRange));
+ let llfn = decl_internal_rust_fn(ccx, fty, &s[]);
// set an inline hint for all closures
set_inline_hint(llfn);
&[],
ty::erase_late_bound_regions(ccx.tcx(), &ty::ty_fn_ret(fty)),
ty::ty_fn_abi(fty),
- ClosureEnv::new(freevars.index(&FullRange),
+ ClosureEnv::new(&freevars[],
BoxedClosure(cdata_ty, store)));
fill_fn_pair(bcx, dest_addr, llfn, llbox);
bcx
mangle_internal_name_by_path_and_seq(path, "unboxed_closure")
});
- let llfn = decl_internal_rust_fn(ccx, function_type, symbol.index(&FullRange));
+ let llfn = decl_internal_rust_fn(ccx, function_type, &symbol[]);
// set an inline hint for all closures
set_inline_hint(llfn);
&[],
sig.output,
function_type.abi,
- ClosureEnv::new(freevars.index(&FullRange),
+ ClosureEnv::new(&freevars[],
UnboxedClosure(freevar_mode)));
// Don't hoist this to the top of the function. It's perfectly legitimate
let num = token::gensym(name).uint();
// use one colon which will get translated to a period by the mangler, and
// we're guaranteed that `num` is globally unique for this crate.
-PathName(token::gensym(format!("{}:{}", name, num).index(&FullRange)))
+PathName(token::gensym(&format!("{}:{}", name, num)[]))
}
#[derive(Copy)]
match self.tcx().def_map.borrow().get(&nid) {
Some(v) => v.clone(),
None => {
- self.tcx().sess.bug(format!(
- "no def associated with node id {}", nid).index(&FullRange));
+ self.tcx().sess.bug(&format!(
+ "no def associated with node id {}", nid)[]);
}
}
}
Err(e) => {
tcx.sess.span_bug(
span,
- format!("Encountered error `{}` selecting `{}` during trans",
+ &format!("Encountered error `{}` selecting `{}` during trans",
e.repr(tcx),
- trait_ref.repr(tcx)).index(&FullRange))
+ trait_ref.repr(tcx))[])
}
};
} else {
infcx.tcx.sess.span_bug(
span,
- format!("Encountered errors `{}` fulfilling during trans",
- errors.repr(infcx.tcx)).index(&FullRange));
+ &format!("Encountered errors `{}` fulfilling during trans",
+ errors.repr(infcx.tcx))[]);
}
}
}
};
if substs.types.any(|t| ty::type_needs_infer(*t)) {
- tcx.sess.bug(format!("type parameters for node {:?} include inference types: {:?}",
- node, substs.repr(tcx)).index(&FullRange));
+ tcx.sess.bug(&format!("type parameters for node {:?} include inference types: {:?}",
+ node, substs.repr(tcx))[]);
}
monomorphize::apply_param_substs(tcx,
Err(s) => {
let msg = format!("{} {}", msg, s);
match span {
- Some(span) => bcx.tcx().sess.span_fatal(span, msg.index(&FullRange)),
- None => bcx.tcx().sess.fatal(msg.index(&FullRange)),
+ Some(span) => bcx.tcx().sess.span_fatal(span, &msg[]),
+ None => bcx.tcx().sess.fatal(&msg[]),
}
}
}
C_integral(Type::uint_from_ty(cx, t), i as u64, false)
}
_ => cx.sess().span_bug(lit.span,
- format!("integer literal has type {} (expected int \
+ &format!("integer literal has type {} (expected int \
or uint)",
- ty_to_string(cx.tcx(), lit_int_ty)).index(&FullRange))
+ ty_to_string(cx.tcx(), lit_int_ty))[])
}
}
ast::LitFloat(ref fs, t) => {
}
ast::LitBool(b) => C_bool(cx, b),
ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()),
- ast::LitBinary(ref data) => C_binary_slice(cx, data.index(&FullRange)),
+ ast::LitBinary(ref data) => C_binary_slice(cx, &data[]),
}
}
.collect::<Vec<_>>();
// If the vector contains enums, an LLVM array won't work.
let v = if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
- C_struct(cx, vs.index(&FullRange), false)
+ C_struct(cx, &vs[], false)
} else {
- C_array(llunitty, vs.index(&FullRange))
+ C_array(llunitty, &vs[])
};
(v, llunitty)
}
(const_deref_newtype(cx, v, t), mt.ty)
}
_ => {
- cx.sess().bug(format!("unexpected dereferenceable type {}",
- ty_to_string(cx.tcx(), t)).index(&FullRange))
+ cx.sess().bug(&format!("unexpected dereferenceable type {}",
+ ty_to_string(cx.tcx(), t))[])
}
}
}
None => {
- cx.sess().bug(format!("cannot dereference const of type {}",
- ty_to_string(cx.tcx(), t)).index(&FullRange))
+ cx.sess().bug(&format!("cannot dereference const of type {}",
+ ty_to_string(cx.tcx(), t))[])
}
}
}
], false);
}
_ => cx.sess().span_bug(e.span,
- format!("unimplemented type in const unsize: {}",
- ty_to_string(cx.tcx(), ty)).index(&FullRange))
+ &format!("unimplemented type in const unsize: {}",
+ ty_to_string(cx.tcx(), ty))[])
}
}
_ => {
cx.sess()
.span_bug(e.span,
- format!("unimplemented const \
+ &format!("unimplemented const \
autoref {:?}",
- autoref).index(&FullRange))
+ autoref)[])
}
}
}
llvm::LLVMDumpValue(llconst);
llvm::LLVMDumpValue(C_undef(llty));
}
- cx.sess().bug(format!("const {} of type {} has size {} instead of {}",
+ cx.sess().bug(&format!("const {} of type {} has size {} instead of {}",
e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety),
- csize, tsize).index(&FullRange));
+ csize, tsize)[]);
}
(llconst, ety_adjusted)
}
(const_deref_ptr(cx, e1), const_get_elt(cx, bv, &[1]))
},
_ => cx.sess().span_bug(base.span,
- format!("index-expr base must be a vector \
+ &format!("index-expr base must be a vector \
or string type, found {}",
- ty_to_string(cx.tcx(), bt)).index(&FullRange))
+ ty_to_string(cx.tcx(), bt))[])
},
ty::ty_rptr(_, mt) => match mt.ty.sty {
ty::ty_vec(_, Some(u)) => {
(const_deref_ptr(cx, bv), C_uint(cx, u))
},
_ => cx.sess().span_bug(base.span,
- format!("index-expr base must be a vector \
+ &format!("index-expr base must be a vector \
or string type, found {}",
- ty_to_string(cx.tcx(), bt)).index(&FullRange))
+ ty_to_string(cx.tcx(), bt))[])
},
_ => cx.sess().span_bug(base.span,
- format!("index-expr base must be a vector \
+ &format!("index-expr base must be a vector \
or string type, found {}",
- ty_to_string(cx.tcx(), bt)).index(&FullRange))
+ ty_to_string(cx.tcx(), bt))[])
};
let len = llvm::LLVMConstIntGetZExtValue(len) as u64;
ast::ExprTup(ref es) => {
let ety = ty::expr_ty(cx.tcx(), e);
let repr = adt::represent_type(cx, ety);
- let vals = map_list(es.index(&FullRange));
- adt::trans_const(cx, &*repr, 0, vals.index(&FullRange))
+ let vals = map_list(&es[]);
+ adt::trans_const(cx, &*repr, 0, &vals[])
}
ast::ExprStruct(_, ref fs, ref base_opt) => {
let ety = ty::expr_ty(cx.tcx(), e);
}
}
}).collect::<Vec<_>>();
- adt::trans_const(cx, &*repr, discr, cs.index(&FullRange))
+ adt::trans_const(cx, &*repr, discr, &cs[])
})
}
ast::ExprVec(ref es) => {
};
let vs: Vec<_> = repeat(const_expr(cx, &**elem).0).take(n).collect();
if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
- C_struct(cx, vs.index(&FullRange), false)
+ C_struct(cx, &vs[], false)
} else {
- C_array(llunitty, vs.index(&FullRange))
+ C_array(llunitty, &vs[])
}
}
ast::ExprPath(_) => {
Some(def::DefStruct(_)) => {
let ety = ty::expr_ty(cx.tcx(), e);
let repr = adt::represent_type(cx, ety);
- let arg_vals = map_list(args.index(&FullRange));
- adt::trans_const(cx, &*repr, 0, arg_vals.index(&FullRange))
+ let arg_vals = map_list(&args[]);
+ adt::trans_const(cx, &*repr, 0, &arg_vals[])
}
Some(def::DefVariant(enum_did, variant_did, _)) => {
let ety = ty::expr_ty(cx.tcx(), e);
let vinfo = ty::enum_variant_with_id(cx.tcx(),
enum_did,
variant_did);
- let arg_vals = map_list(args.index(&FullRange));
+ let arg_vals = map_list(&args[]);
adt::trans_const(cx,
&*repr,
vinfo.disr_val,
- arg_vals.index(&FullRange))
+ &arg_vals[])
}
_ => cx.sess().span_bug(e.span, "expected a struct or variant def")
}
// such as a function name in the module.
// 1. http://llvm.org/bugs/show_bug.cgi?id=11479
let llmod_id = format!("{}.{}.rs", crate_name, i);
- let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id.index(&FullRange));
+ let local_ccx = LocalCrateContext::new(&shared_ccx, &llmod_id[]);
shared_ccx.local_ccxs.push(local_ccx);
}
unsafe {
let (llcx, llmod) = create_context_and_module(&shared.tcx.sess, name);
- let td = mk_target_data(shared.tcx
+ let td = mk_target_data(&shared.tcx
.sess
.target
.target
.data_layout
- .index(&FullRange));
+ []);
let dbg_cx = if shared.tcx.sess.opts.debuginfo != NoDebugInfo {
Some(debuginfo::CrateDebugContext::new(llmod))
/// currently conservatively bounded to 1 << 47 as that is enough to cover the current usable
/// address space on 64-bit ARMv8 and x86_64.
pub fn obj_size_bound(&self) -> u64 {
- match self.sess().target.target.target_word_size.index(&FullRange) {
+ match &self.sess().target.target.target_word_size[] {
"32" => 1 << 31,
"64" => 1 << 47,
_ => unreachable!() // error handled by config::build_target_config
pub fn report_overbig_object(&self, obj: Ty<'tcx>) -> ! {
self.sess().fatal(
- format!("the type `{}` is too big for the current architecture",
- obj.repr(self.tcx())).index(&FullRange))
+ &format!("the type `{}` is too big for the current architecture",
+ obj.repr(self.tcx()))[])
}
}
debug!("trans_stmt({})", s.repr(cx.tcx()));
if cx.sess().asm_comments() {
- add_span_comment(cx, s.span, s.repr(cx.tcx()).index(&FullRange));
+ add_span_comment(cx, s.span, &s.repr(cx.tcx())[]);
}
let mut bcx = cx;
}
let name = format!("then-block-{}-", thn.id);
- let then_bcx_in = bcx.fcx.new_id_block(name.index(&FullRange), thn.id);
+ let then_bcx_in = bcx.fcx.new_id_block(&name[], thn.id);
let then_bcx_out = trans_block(then_bcx_in, &*thn, dest);
trans::debuginfo::clear_source_location(bcx.fcx);
match bcx.tcx().def_map.borrow().get(&expr_id) {
Some(&def::DefLabel(loop_id)) => loop_id,
ref r => {
- bcx.tcx().sess.bug(format!("{:?} in def-map for label",
- r).index(&FullRange))
+ bcx.tcx().sess.bug(&format!("{:?} in def-map for label",
+ r)[])
}
}
}
let v_str = C_str_slice(ccx, fail_str);
let loc = bcx.sess().codemap().lookup_char_pos(sp.lo);
- let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange));
+ let filename = token::intern_and_get_ident(&loc.file.name[]);
let filename = C_str_slice(ccx, filename);
let line = C_uint(ccx, loc.line);
let expr_file_line_const = C_struct(ccx, &[v_str, filename, line], false);
let did = langcall(bcx, Some(sp), "", PanicFnLangItem);
let bcx = callee::trans_lang_call(bcx,
did,
- args.index(&FullRange),
+ &args[],
Some(expr::Ignore)).bcx;
Unreachable(bcx);
return bcx;
// Extract the file/line from the span
let loc = bcx.sess().codemap().lookup_char_pos(sp.lo);
- let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange));
+ let filename = token::intern_and_get_ident(&loc.file.name[]);
// Invoke the lang item
let filename = C_str_slice(ccx, filename);
let did = langcall(bcx, Some(sp), "", PanicBoundsCheckFnLangItem);
let bcx = callee::trans_lang_call(bcx,
did,
- args.index(&FullRange),
+ &args[],
Some(expr::Ignore)).bcx;
Unreachable(bcx);
return bcx;
gep(base)
}
_ => bcx.tcx().sess.bug(
- format!("Unexpected unsized type in get_element: {}",
- bcx.ty_to_string(self.ty)).index(&FullRange))
+ &format!("Unexpected unsized type in get_element: {}",
+ bcx.ty_to_string(self.ty))[])
};
Datum {
val: val,
type_: Ty<'tcx>,
metadata: DIType) {
if self.type_to_metadata.insert(type_, metadata).is_some() {
- cx.sess().bug(format!("Type metadata for Ty '{}' is already in the TypeMap!",
- ppaux::ty_to_string(cx.tcx(), type_)).index(&FullRange));
+ cx.sess().bug(&format!("Type metadata for Ty '{}' is already in the TypeMap!",
+ ppaux::ty_to_string(cx.tcx(), type_))[]);
}
}
metadata: DIType) {
if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() {
let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id);
- cx.sess().bug(format!("Type metadata for unique id '{}' is already in the TypeMap!",
- unique_type_id_str.index(&FullRange)).index(&FullRange));
+ cx.sess().bug(&format!("Type metadata for unique id '{}' is already in the TypeMap!",
+ &unique_type_id_str[])[]);
}
}
// unique ptr (~) -> {~ :pointee-uid:}
// @-ptr (@) -> {@ :pointee-uid:}
// sized vec ([T; x]) -> {[:size:] :element-uid:}
- // unsized vec ([T]) -> {.index(&FullRange) :element-uid:}
+ // unsized vec ([T]) -> {[] :element-uid:}
// trait (T) -> {trait_:svh: / :node-id:_<(:param-uid:),*> }
// closure -> {<unsafe_> <once_> :store-sigil: |(:param-uid:),* <,_...>| -> \
// :return-type-uid: : (:bounds:)*}
// function -> {<unsafe_> <abi_> fn( (:param-uid:)* <,_...> ) -> \
// :return-type-uid:}
- // unique vec box (~.index(&FullRange)) -> {HEAP_VEC_BOX<:pointee-uid:>}
+ // unique vec box (~[]) -> {HEAP_VEC_BOX<:pointee-uid:>}
// gc box -> {GC_BOX<:pointee-uid:>}
match self.type_to_unique_id.get(&type_).cloned() {
self.get_unique_type_id_of_type(cx, component_type);
let component_type_id =
self.get_unique_type_id_as_string(component_type_id);
- unique_type_id.push_str(component_type_id.index(&FullRange));
+ unique_type_id.push_str(&component_type_id[]);
}
},
ty::ty_uniq(inner_type) => {
unique_type_id.push('~');
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
- unique_type_id.push_str(inner_type_id.index(&FullRange));
+ unique_type_id.push_str(&inner_type_id[]);
},
ty::ty_ptr(ty::mt { ty: inner_type, mutbl } ) => {
unique_type_id.push('*');
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
- unique_type_id.push_str(inner_type_id.index(&FullRange));
+ unique_type_id.push_str(&inner_type_id[]);
},
ty::ty_rptr(_, ty::mt { ty: inner_type, mutbl }) => {
unique_type_id.push('&');
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
- unique_type_id.push_str(inner_type_id.index(&FullRange));
+ unique_type_id.push_str(&inner_type_id[]);
},
ty::ty_vec(inner_type, optional_length) => {
match optional_length {
Some(len) => {
- unique_type_id.push_str(format!("[{}]", len).index(&FullRange));
+ unique_type_id.push_str(&format!("[{}]", len)[]);
}
None => {
unique_type_id.push_str("[]");
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
- unique_type_id.push_str(inner_type_id.index(&FullRange));
+ unique_type_id.push_str(&inner_type_id[]);
},
ty::ty_trait(ref trait_data) => {
unique_type_id.push_str("trait ");
self.get_unique_type_id_of_type(cx, parameter_type);
let parameter_type_id =
self.get_unique_type_id_as_string(parameter_type_id);
- unique_type_id.push_str(parameter_type_id.index(&FullRange));
+ unique_type_id.push_str(&parameter_type_id[]);
unique_type_id.push(',');
}
ty::FnConverging(ret_ty) => {
let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty);
let return_type_id = self.get_unique_type_id_as_string(return_type_id);
- unique_type_id.push_str(return_type_id.index(&FullRange));
+ unique_type_id.push_str(&return_type_id[]);
}
ty::FnDiverging => {
unique_type_id.push_str("!");
&mut unique_type_id);
},
_ => {
- cx.sess().bug(format!("get_unique_type_id_of_type() - unexpected type: {}, {:?}",
- ppaux::ty_to_string(cx.tcx(), type_).index(&FullRange),
- type_.sty).index(&FullRange))
+ cx.sess().bug(&format!("get_unique_type_id_of_type() - unexpected type: {}, {:?}",
+ &ppaux::ty_to_string(cx.tcx(), type_)[],
+ type_.sty)[])
}
};
output.push_str(crate_hash.as_str());
output.push_str("/");
- output.push_str(format!("{:x}", def_id.node).index(&FullRange));
+ output.push_str(&format!("{:x}", def_id.node)[]);
// Maybe check that there is no self type here.
type_map.get_unique_type_id_of_type(cx, type_parameter);
let param_type_id =
type_map.get_unique_type_id_as_string(param_type_id);
- output.push_str(param_type_id.index(&FullRange));
+ output.push_str(&param_type_id[]);
output.push(',');
}
self.get_unique_type_id_of_type(cx, parameter_type);
let parameter_type_id =
self.get_unique_type_id_as_string(parameter_type_id);
- unique_type_id.push_str(parameter_type_id.index(&FullRange));
+ unique_type_id.push_str(&parameter_type_id[]);
unique_type_id.push(',');
}
ty::FnConverging(ret_ty) => {
let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty);
let return_type_id = self.get_unique_type_id_as_string(return_type_id);
- unique_type_id.push_str(return_type_id.index(&FullRange));
+ unique_type_id.push_str(&return_type_id[]);
}
ty::FnDiverging => {
unique_type_id.push_str("!");
-> UniqueTypeId {
let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type);
let enum_variant_type_id = format!("{}::{}",
- self.get_unique_type_id_as_string(enum_type_id)
- .index(&FullRange),
+ &self.get_unique_type_id_as_string(enum_type_id)[],
variant_name);
let interner_key = self.unique_id_interner.intern(Rc::new(enum_variant_type_id));
UniqueTypeId(interner_key)
_ => {
cx.sess()
.span_bug(item.span,
- format!("debuginfo::\
+ &format!("debuginfo::\
create_global_var_metadata() -
Captured var-id refers to \
unexpected ast_item variant: {:?}",
- var_item).index(&FullRange))
+ var_item)[])
}
}
},
- _ => cx.sess().bug(format!("debuginfo::create_global_var_metadata() \
+ _ => cx.sess().bug(&format!("debuginfo::create_global_var_metadata() \
- Captured var-id refers to unexpected \
ast_map variant: {:?}",
- var_item).index(&FullRange))
+ var_item)[])
};
let (file_metadata, line_number) = if span != codemap::DUMMY_SP {
let loc = span_start(cx, span);
- (file_metadata(cx, loc.file.name.index(&FullRange)), loc.line as c_uint)
+ (file_metadata(cx, &loc.file.name[]), loc.line as c_uint)
} else {
(UNKNOWN_FILE_METADATA, UNKNOWN_LINE_NUMBER)
};
let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id));
let var_name = token::get_ident(ident).get().to_string();
let linkage_name =
- namespace_node.mangled_name_of_contained_item(var_name.index(&FullRange));
+ namespace_node.mangled_name_of_contained_item(&var_name[]);
let var_scope = namespace_node.scope;
let var_name = CString::from_slice(var_name.as_bytes());
Some(datum) => datum,
None => {
bcx.sess().span_bug(span,
- format!("no entry in lllocals table for {}",
- node_id).index(&FullRange));
+ &format!("no entry in lllocals table for {}",
+ node_id)[]);
}
};
_ => {
cx.sess()
.span_bug(span,
- format!(
+ &format!(
"debuginfo::create_captured_var_metadata() - \
Captured var-id refers to unexpected \
ast_map variant: {:?}",
- ast_item).index(&FullRange));
+ ast_item)[]);
}
}
}
_ => {
cx.sess()
.span_bug(span,
- format!("debuginfo::create_captured_var_metadata() - \
+ &format!("debuginfo::create_captured_var_metadata() - \
Captured var-id refers to unexpected \
ast_map variant: {:?}",
- ast_item).index(&FullRange));
+ ast_item)[]);
}
};
let variable_access = IndirectVariable {
alloca: env_pointer,
- address_operations: address_operations.index(&(0..address_op_count))
+ address_operations: &address_operations[0..address_op_count]
};
declare_local(bcx,
Some(v) => v,
None => {
bcx.sess().span_bug(span,
- format!("no entry in lllocals table for {}",
- node_id).index(&FullRange));
+ &format!("no entry in lllocals table for {}",
+ node_id)[]);
}
};
if let Some(code_snippet) = code_snippet {
let bytes = code_snippet.as_bytes();
- if bytes.len() > 0 && bytes.index(&((bytes.len()-1)..)) == b"}" {
+ if bytes.len() > 0 && &bytes[(bytes.len()-1)..] == b"}" {
cleanup_span = Span {
lo: node_span.hi - codemap::BytePos(1),
hi: node_span.hi,
match expr.node {
ast::ExprClosure(_, _, ref fn_decl, ref top_level_block) => {
let name = format!("fn{}", token::gensym("fn"));
- let name = token::str_to_ident(name.index(&FullRange));
+ let name = token::str_to_ident(&name[]);
(name, &**fn_decl,
// This is not quite right. It should actually inherit
// the generics of the enclosing function.
}
_ => {
cx.sess()
- .bug(format!("create_function_debug_context: \
+ .bug(&format!("create_function_debug_context: \
unexpected sort of node: {:?}",
- fnitem).index(&FullRange))
+ fnitem)[])
}
}
}
ast_map::NodeStructCtor(..) => {
return FunctionDebugContext::FunctionWithoutDebugInfo;
}
- _ => cx.sess().bug(format!("create_function_debug_context: \
+ _ => cx.sess().bug(&format!("create_function_debug_context: \
unexpected sort of node: {:?}",
- fnitem).index(&FullRange))
+ fnitem)[])
};
// This can be the case for functions inlined from another crate
}
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange));
+ let file_metadata = file_metadata(cx, &loc.file.name[]);
let function_type_metadata = unsafe {
let fn_signature = get_function_signature(cx,
let (linkage_name, containing_scope) = if has_path {
let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id));
let linkage_name = namespace_node.mangled_name_of_contained_item(
- function_name.index(&FullRange));
+ &function_name[]);
let containing_scope = namespace_node.scope;
(linkage_name, containing_scope)
} else {
signature.push(type_metadata(cx, arg_type, codemap::DUMMY_SP));
}
- return create_DIArray(DIB(cx), signature.index(&FullRange));
+ return create_DIArray(DIB(cx), &signature[]);
}
fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
actual_self_type,
true);
- name_to_append_suffix_to.push_str(actual_self_type_name.index(&FullRange));
+ name_to_append_suffix_to.push_str(&actual_self_type_name[]);
if generics.is_type_parameterized() {
name_to_append_suffix_to.push_str(",");
let actual_type_name = compute_debuginfo_type_name(cx,
actual_type,
true);
- name_to_append_suffix_to.push_str(actual_type_name.index(&FullRange));
+ name_to_append_suffix_to.push_str(&actual_type_name[]);
if index != generics.ty_params.len() - 1 {
name_to_append_suffix_to.push_str(",");
name_to_append_suffix_to.push('>');
- return create_DIArray(DIB(cx), template_params.index(&FullRange));
+ return create_DIArray(DIB(cx), &template_params[]);
}
}
let cx: &CrateContext = bcx.ccx();
let filename = span_start(cx, span).file.name.clone();
- let file_metadata = file_metadata(cx, filename.index(&FullRange));
+ let file_metadata = file_metadata(cx, &filename[]);
let name = token::get_ident(variable_ident);
let loc = span_start(cx, span);
let work_dir = cx.sess().working_dir.as_str().unwrap();
let file_name =
if full_path.starts_with(work_dir) {
- full_path.index(&((work_dir.len() + 1u)..full_path.len()))
+ &full_path[(work_dir.len() + 1u)..full_path.len()]
} else {
full_path
};
let node = fcx.ccx.tcx().map.get(node_id);
fcx.ccx.sess().span_bug(error_reporting_span,
- format!("debuginfo: Could not find scope info for node {:?}",
- node).index(&FullRange));
+ &format!("debuginfo: Could not find scope info for node {:?}",
+ node)[]);
}
}
}
let type_map = debug_context(cx).type_map.borrow();
if type_map.find_metadata_for_unique_id(unique_type_id).is_none() ||
type_map.find_metadata_for_type(unfinished_type).is_none() {
- cx.sess().bug(format!("Forward declaration of potentially recursive type \
+ cx.sess().bug(&format!("Forward declaration of potentially recursive type \
'{}' was not found in TypeMap!",
ppaux::ty_to_string(cx.tcx(), unfinished_type))
- .index(&FullRange));
+ []);
}
}
set_members_of_composite_type(cx,
metadata_stub,
llvm_type,
- member_descriptions.index(&FullRange));
+ &member_descriptions[]);
return MetadataCreationResult::new(metadata_stub, true);
}
}
let struct_metadata_stub = create_struct_stub(cx,
struct_llvm_type,
- struct_name.index(&FullRange),
+ &struct_name[],
unique_type_id,
containing_scope);
unique_type_id,
create_struct_stub(cx,
tuple_llvm_type,
- tuple_name.index(&FullRange),
+ &tuple_name[],
unique_type_id,
UNKNOWN_SCOPE_METADATA),
tuple_llvm_type,
set_members_of_composite_type(cx,
variant_type_metadata,
variant_llvm_type,
- member_descriptions.index(&FullRange));
+ &member_descriptions[]);
MemberDescription {
name: "".to_string(),
llvm_type: variant_llvm_type,
set_members_of_composite_type(cx,
variant_type_metadata,
variant_llvm_type,
- member_descriptions.index(&FullRange));
+ &member_descriptions[]);
vec![
MemberDescription {
name: "".to_string(),
set_members_of_composite_type(cx,
variant_type_metadata,
variant_llvm_type,
- variant_member_descriptions.index(&FullRange));
+ &variant_member_descriptions[]);
// Encode the information about the null variant in the union
// member's name.
span: Span)
-> (DICompositeType, Type, MemberDescriptionFactory<'tcx>) {
let variant_llvm_type =
- Type::struct_(cx, struct_def.fields
+ Type::struct_(cx, &struct_def.fields
.iter()
.map(|&t| type_of::type_of(cx, t))
.collect::<Vec<_>>()
- .index(&FullRange),
+ [],
struct_def.packed);
// Could do some consistency checks here: size, align, field count, discr type
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id);
let loc = span_start(cx, definition_span);
- let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange));
+ let file_metadata = file_metadata(cx, &loc.file.name[]);
let variants = ty::enum_variants(cx.tcx(), enum_def_id);
let min_supported_llvm_version = 3 * 1000000 + 4 * 1000;
if actual_llvm_version < min_supported_llvm_version {
- cx.sess().warn(format!("This version of rustc was built with LLVM \
+ cx.sess().warn(&format!("This version of rustc was built with LLVM \
{}.{}. Rustc just ran into a known \
debuginfo corruption problem thatoften \
occurs with LLVM versions below 3.4. \
Please use a rustc built with anewer \
version of LLVM.",
llvm_version_major,
- llvm_version_minor).index(&FullRange));
+ llvm_version_minor)[]);
} else {
cx.sess().bug("debuginfo::set_members_of_composite_type() - \
Already completed forward declaration re-encountered.");
.collect();
unsafe {
- let type_array = create_DIArray(DIB(cx), member_metadata.index(&FullRange));
+ let type_array = create_DIArray(DIB(cx), &member_metadata[]);
llvm::LLVMDICompositeTypeSetTypeArray(composite_type_metadata, type_array);
}
}
let member_llvm_types = slice_llvm_type.field_types();
assert!(slice_layout_is_correct(cx,
- member_llvm_types.index(&FullRange),
+ &member_llvm_types[],
element_type));
let member_descriptions = [
MemberDescription {
assert!(member_descriptions.len() == member_llvm_types.len());
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange));
+ let file_metadata = file_metadata(cx, &loc.file.name[]);
let metadata = composite_type_metadata(cx,
slice_llvm_type,
- slice_type_name.index(&FullRange),
+ &slice_type_name[],
unique_type_id,
&member_descriptions,
UNKNOWN_SCOPE_METADATA,
llvm::LLVMDIBuilderCreateSubroutineType(
DIB(cx),
UNKNOWN_FILE_METADATA,
- create_DIArray(DIB(cx), signature_metadata.index(&FullRange)))
+ create_DIArray(DIB(cx), &signature_metadata[]))
},
false);
}
ty::ty_trait(ref data) => data.principal_def_id(),
_ => {
let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type);
- cx.sess().bug(format!("debuginfo: Unexpected trait-object type in \
+ cx.sess().bug(&format!("debuginfo: Unexpected trait-object type in \
trait_pointer_metadata(): {}",
- pp_type_name.index(&FullRange)).index(&FullRange));
+ &pp_type_name[])[]);
}
};
composite_type_metadata(cx,
trait_llvm_type,
- trait_type_name.index(&FullRange),
+ &trait_type_name[],
unique_type_id,
&[],
containing_scope,
ty::ty_tup(ref elements) => {
prepare_tuple_metadata(cx,
t,
- elements.index(&FullRange),
+ &elements[],
unique_type_id,
usage_site_span).finalize(cx)
}
_ => {
- cx.sess().bug(format!("debuginfo: unexpected type in type_metadata: {:?}",
- sty).index(&FullRange))
+ cx.sess().bug(&format!("debuginfo: unexpected type in type_metadata: {:?}",
+ sty)[])
}
};
type id '{}' to already be in \
the debuginfo::TypeMap but it \
was not. (Ty = {})",
- unique_type_id_str.index(&FullRange),
+ &unique_type_id_str[],
ppaux::ty_to_string(cx.tcx(), t));
- cx.sess().span_bug(usage_site_span, error_message.index(&FullRange));
+ cx.sess().span_bug(usage_site_span, &error_message[]);
}
};
UniqueTypeId maps in \
debuginfo::TypeMap. \
UniqueTypeId={}, Ty={}",
- unique_type_id_str.index(&FullRange),
+ &unique_type_id_str[],
ppaux::ty_to_string(cx.tcx(), t));
- cx.sess().span_bug(usage_site_span, error_message.index(&FullRange));
+ cx.sess().span_bug(usage_site_span, &error_message[]);
}
}
None => {
{
// Create a new lexical scope and push it onto the stack
let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo);
- let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange));
+ let file_metadata = file_metadata(cx, &loc.file.name[]);
let parent_scope = scope_stack.last().unwrap().scope_metadata;
let scope_metadata = unsafe {
if need_new_scope {
// Create a new lexical scope and push it onto the stack
let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo);
- let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange));
+ let file_metadata = file_metadata(cx, &loc.file.name[]);
let parent_scope = scope_stack.last().unwrap().scope_metadata;
let scope_metadata = unsafe {
ty::ty_open(_) |
ty::ty_projection(..) |
ty::ty_param(_) => {
- cx.sess().bug(format!("debuginfo: Trying to create type name for \
- unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange));
+ cx.sess().bug(&format!("debuginfo: Trying to create type name for \
+ unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t))[]);
}
}
None => {}
}
let string = token::get_name(node.name);
- output.push_str(format!("{}", string.get().len()).index(&FullRange));
+ output.push_str(&format!("{}", string.get().len())[]);
output.push_str(string.get());
}
let mut name = String::from_str("_ZN");
fill_nested(self, &mut name);
- name.push_str(format!("{}", item_name.len()).index(&FullRange));
+ name.push_str(&format!("{}", item_name.len())[]);
name.push_str(item_name);
name.push('E');
name
}
fn crate_root_namespace<'a>(cx: &'a CrateContext) -> &'a str {
- cx.link_meta().crate_name.index(&FullRange)
+ &cx.link_meta().crate_name[]
}
fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTreeNode> {
match parent_node {
Some(node) => node,
None => {
- cx.sess().bug(format!("debuginfo::namespace_for_item(): \
+ cx.sess().bug(&format!("debuginfo::namespace_for_item(): \
path too short for {:?}",
- def_id).index(&FullRange));
+ def_id)[]);
}
}
})
// should just be the identity function.
unsized_info(bcx, k, id, ty_substs[tp_index], identity)
}
- _ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}",
- bcx.ty_to_string(unadjusted_ty)).index(&FullRange))
+ _ => bcx.sess().bug(&format!("UnsizeStruct with bad sty: {}",
+ bcx.ty_to_string(unadjusted_ty))[])
},
&ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => {
// Note that we preserve binding levels here:
let datum_ty = datum.ty;
let unboxed_ty = match datum_ty.sty {
ty::ty_uniq(t) => t,
- _ => bcx.sess().bug(format!("Expected ty_uniq, found {}",
- bcx.ty_to_string(datum_ty)).index(&FullRange))
+ _ => bcx.sess().bug(&format!("Expected ty_uniq, found {}",
+ bcx.ty_to_string(datum_ty))[])
};
let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span));
_ => {
bcx.tcx().sess.span_bug(
expr.span,
- format!("trans_rvalue_datum_unadjusted reached \
+ &format!("trans_rvalue_datum_unadjusted reached \
fall-through case: {:?}",
- expr.node).index(&FullRange));
+ expr.node)[]);
}
}
}
_ => {
bcx.tcx().sess.span_bug(
expr.span,
- format!("trans_rvalue_stmt_unadjusted reached \
+ &format!("trans_rvalue_stmt_unadjusted reached \
fall-through case: {:?}",
- expr.node).index(&FullRange));
+ expr.node)[]);
}
}
}
controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest)
}
ast::ExprMatch(ref discr, ref arms, _) => {
- _match::trans_match(bcx, expr, &**discr, arms.index(&FullRange), dest)
+ _match::trans_match(bcx, expr, &**discr, &arms[], dest)
}
ast::ExprBlock(ref blk) => {
controlflow::trans_block(bcx, &**blk, dest)
}
ast::ExprStruct(_, ref fields, ref base) => {
trans_struct(bcx,
- fields.index(&FullRange),
+ &fields[],
base.as_ref().map(|e| &**e),
expr.span,
expr.id,
trans_adt(bcx,
expr_ty(bcx, expr),
0,
- numbered_fields.index(&FullRange),
+ &numbered_fields[],
None,
dest,
Some(NodeInfo { id: expr.id, span: expr.span }))
trans_overloaded_call(bcx,
expr,
&**f,
- args.index(&FullRange),
+ &args[],
Some(dest))
} else {
callee::trans_call(bcx,
expr,
&**f,
- callee::ArgExprs(args.index(&FullRange)),
+ callee::ArgExprs(&args[]),
dest)
}
}
callee::trans_method_call(bcx,
expr,
&*args[0],
- callee::ArgExprs(args.index(&FullRange)),
+ callee::ArgExprs(&args[]),
dest)
}
ast::ExprBinary(op, ref lhs, ref rhs) => {
_ => {
bcx.tcx().sess.span_bug(
expr.span,
- format!("trans_rvalue_dps_unadjusted reached fall-through \
+ &format!("trans_rvalue_dps_unadjusted reached fall-through \
case: {:?}",
- expr.node).index(&FullRange));
+ expr.node)[]);
}
}
}
bcx
}
_ => {
- bcx.tcx().sess.span_bug(ref_expr.span, format!(
+ bcx.tcx().sess.span_bug(ref_expr.span, &format!(
"Non-DPS def {:?} referened by {}",
- def, bcx.node_id_to_string(ref_expr.id)).index(&FullRange));
+ def, bcx.node_id_to_string(ref_expr.id))[]);
}
}
}
param_substs)
}
_ => {
- ccx.tcx().sess.span_bug(ref_expr.span, format!(
+ ccx.tcx().sess.span_bug(ref_expr.span, &format!(
"trans_def_fn_unadjusted invoked on: {:?} for {}",
def,
- ref_expr.repr(ccx.tcx())).index(&FullRange));
+ ref_expr.repr(ccx.tcx()))[]);
}
}
}
match bcx.fcx.llupvars.borrow().get(&nid) {
Some(&val) => Datum::new(val, local_ty, Lvalue),
None => {
- bcx.sess().bug(format!(
+ bcx.sess().bug(&format!(
"trans_local_var: no llval for upvar {} found",
- nid).index(&FullRange));
+ nid)[]);
}
}
}
let datum = match bcx.fcx.lllocals.borrow().get(&nid) {
Some(&v) => v,
None => {
- bcx.sess().bug(format!(
+ bcx.sess().bug(&format!(
"trans_local_var: no datum for local/arg {} found",
- nid).index(&FullRange));
+ nid)[]);
}
};
debug!("take_local(nid={}, v={}, ty={})",
datum
}
_ => {
- bcx.sess().unimpl(format!(
+ bcx.sess().unimpl(&format!(
"unsupported def type in trans_local_var: {:?}",
- def).index(&FullRange));
+ def)[]);
}
}
}
{
match ty.sty {
ty::ty_struct(did, substs) => {
- op(0, struct_fields(tcx, did, substs).index(&FullRange))
+ op(0, &struct_fields(tcx, did, substs)[])
}
ty::ty_tup(ref v) => {
- op(0, tup_fields(v.index(&FullRange)).index(&FullRange))
+ op(0, &tup_fields(&v[])[])
}
ty::ty_enum(_, substs) => {
// We want the *variant* ID here, not the enum ID.
match node_id_opt {
None => {
- tcx.sess.bug(format!(
+ tcx.sess.bug(&format!(
"cannot get field types from the enum type {} \
without a node ID",
- ty.repr(tcx)).index(&FullRange));
+ ty.repr(tcx))[]);
}
Some(node_id) => {
let def = tcx.def_map.borrow()[node_id].clone();
let variant_info = ty::enum_variant_with_id(
tcx, enum_id, variant_id);
op(variant_info.disr_val,
- struct_fields(tcx,
+ &struct_fields(tcx,
variant_id,
- substs).index(&FullRange))
+ substs)[])
}
_ => {
tcx.sess.bug("resolve didn't map this expr to a \
}
_ => {
- tcx.sess.bug(format!(
+ tcx.sess.bug(&format!(
"cannot get field types from the type {}",
- ty.repr(tcx)).index(&FullRange));
+ ty.repr(tcx))[]);
}
}
}
lldiscrim_a, true),
cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out),
_ => {
- ccx.sess().bug(format!("translating unsupported cast: \
+ ccx.sess().bug(&format!("translating unsupported cast: \
{} ({:?}) -> {} ({:?})",
t_in.repr(bcx.tcx()),
k_in,
t_out.repr(bcx.tcx()),
- k_out).index(&FullRange))
+ k_out)[])
}
}
}
- _ => ccx.sess().bug(format!("translating unsupported cast: \
+ _ => ccx.sess().bug(&format!("translating unsupported cast: \
{} ({:?}) -> {} ({:?})",
t_in.repr(bcx.tcx()),
k_in,
t_out.repr(bcx.tcx()),
- k_out).index(&FullRange))
+ k_out)[])
};
return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock();
}
_ => {
bcx.tcx().sess.span_bug(
expr.span,
- format!("deref invoked on expr of illegal type {}",
- datum.ty.repr(bcx.tcx())).index(&FullRange));
+ &format!("deref invoked on expr of illegal type {}",
+ datum.ty.repr(bcx.tcx()))[]);
}
};
let llty = type_of::type_of(ccx, ty);
let ident = link_name(foreign_item);
- match attr::first_attr_value_str_by_name(foreign_item.attrs.index(&FullRange),
+ match attr::first_attr_value_str_by_name(&foreign_item.attrs[],
"linkage") {
// If this is a static with a linkage specified, then we need to handle
// it a little specially. The typesystem prevents things like &T and
_ => ccx.sess().bug("trans_native_call called on non-function type")
};
let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig);
- let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys.index(&FullRange));
+ let llsig = foreign_signature(ccx, &fn_sig, &passed_arg_tys[]);
let fn_type = cabi::compute_abi_info(ccx,
- llsig.llarg_tys.index(&FullRange),
+ &llsig.llarg_tys[],
llsig.llret_ty,
llsig.ret_def);
- let arg_tys: &[cabi::ArgType] = fn_type.arg_tys.index(&FullRange);
+ let arg_tys: &[cabi::ArgType] = &fn_type.arg_tys[];
let mut llargs_foreign = Vec::new();
let llforeign_retval = CallWithConv(bcx,
llfn,
- llargs_foreign.index(&FullRange),
+ &llargs_foreign[],
cc,
Some(attrs));
abi => {
let ty = ty::node_id_to_type(ccx.tcx(), foreign_item.id);
register_foreign_item_fn(ccx, abi, ty,
- lname.get().index(&FullRange));
+ &lname.get()[]);
// Unlike for other items, we shouldn't call
// `base::update_linkage` here. Foreign items have
// special linkage requirements, which are handled
assert!(f.abi != Rust && f.abi != RustIntrinsic);
}
_ => {
- ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \
+ ccx.sess().bug(&format!("build_rust_fn: extern fn {} has ty {}, \
expected a bare fn ty",
ccx.tcx().map.path_to_string(id),
- t.repr(tcx)).index(&FullRange));
+ t.repr(tcx))[]);
}
};
ccx.tcx().map.path_to_string(id),
id, t.repr(tcx));
- let llfn = base::decl_internal_rust_fn(ccx, t, ps.index(&FullRange));
+ let llfn = base::decl_internal_rust_fn(ccx, t, &ps[]);
base::set_llvm_fn_attrs(ccx, attrs, llfn);
base::trans_fn(ccx, decl, body, llfn, param_substs, id, &[]);
llfn
// the massive simplifications that have occurred.
pub fn link_name(i: &ast::ForeignItem) -> InternedString {
- match attr::first_attr_value_str_by_name(i.attrs.index(&FullRange), "link_name") {
+ match attr::first_attr_value_str_by_name(&i.attrs[], "link_name") {
Some(ln) => ln.clone(),
- None => match weak_lang_items::link_name(i.attrs.index(&FullRange)) {
+ None => match weak_lang_items::link_name(&i.attrs[]) {
Some(name) => name,
None => token::get_ident(i.ident),
}
let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig);
let llsig = foreign_signature(ccx, &fn_sig, fn_sig.inputs.as_slice());
let fn_ty = cabi::compute_abi_info(ccx,
- llsig.llarg_tys.index(&FullRange),
+ &llsig.llarg_tys[],
llsig.llret_ty,
llsig.ret_def);
debug!("foreign_types_for_fn_ty(\
fn_ty={} -> {}, \
ret_def={}",
ty.repr(ccx.tcx()),
- ccx.tn().types_to_str(llsig.llarg_tys.index(&FullRange)),
+ ccx.tn().types_to_str(&llsig.llarg_tys[]),
ccx.tn().type_to_string(llsig.llret_ty),
ccx.tn().types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::<Vec<_>>().as_slice()),
ccx.tn().type_to_string(fn_ty.ret_ty.ty),
if tys.fn_sig.variadic {
Type::variadic_func(llargument_tys.as_slice(), &llreturn_ty)
} else {
- Type::func(llargument_tys.index(&FullRange), &llreturn_ty)
+ Type::func(&llargument_tys[], &llreturn_ty)
}
}
let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) {
Some(old_sym) => {
- let glue = decl_cdecl_fn(ccx, old_sym.index(&FullRange), llfnty, ty::mk_nil(ccx.tcx()));
+ let glue = decl_cdecl_fn(ccx, &old_sym[], llfnty, ty::mk_nil(ccx.tcx()));
(glue, None)
},
None => {
assert!(sig.inputs.len() == 1);
sig.inputs[0]
}
- _ => bcx.sess().bug(format!("Expected function type, found {}",
- bcx.ty_to_string(fty)).index(&FullRange))
+ _ => bcx.sess().bug(&format!("Expected function type, found {}",
+ bcx.ty_to_string(fty))[])
};
let (struct_data, info) = if type_is_sized(bcx.tcx(), t) {
class_did,
&[get_drop_glue_type(bcx.ccx(), t)],
ty::mk_nil(bcx.tcx()));
- let (_, variant_cx) = invoke(variant_cx, dtor_addr, args.index(&FullRange), dtor_ty, None);
+ let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[], dtor_ty, None);
variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope);
variant_cx
let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
(Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), 8u))
}
- _ => bcx.sess().bug(format!("Unexpected unsized type, found {}",
- bcx.ty_to_string(t)).index(&FullRange))
+ _ => bcx.sess().bug(&format!("Unexpected unsized type, found {}",
+ bcx.ty_to_string(t))[])
}
}
} else {
// Give the user a heads up that we are doing something
// stupid and dangerous.
- bcx.sess().warn(format!("Ignoring drop flag in destructor for {}\
+ bcx.sess().warn(&format!("Ignoring drop flag in destructor for {}\
because the struct is unsized. See issue\
#16758",
- bcx.ty_to_string(t)).index(&FullRange));
+ bcx.ty_to_string(t))[]);
trans_struct_drop(bcx, t, v0, dtor, did, substs)
}
}
note_unique_llvm_symbol(ccx, name);
let ty_name = token::intern_and_get_ident(
- ppaux::ty_to_string(ccx.tcx(), t).index(&FullRange));
+ &ppaux::ty_to_string(ccx.tcx(), t)[]);
let ty_name = C_str_slice(ccx, ty_name);
debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t));
let fn_nm = mangle_internal_name_by_type_and_seq(
ccx,
t,
- format!("glue_{}", name).index(&FullRange));
- let llfn = decl_cdecl_fn(ccx, fn_nm.index(&FullRange), llfnty, ty::mk_nil(ccx.tcx()));
+ &format!("glue_{}", name)[]);
+ let llfn = decl_cdecl_fn(ccx, &fn_nm[], llfnty, ty::mk_nil(ccx.tcx()));
note_unique_llvm_symbol(ccx, fn_nm.clone());
return (fn_nm, llfn);
}
match *impl_item {
ast::MethodImplItem(ref method) => {
if method.pe_generics().ty_params.len() == 0u {
- let trans_everywhere = attr::requests_inline(method.attrs.index(&FullRange));
+ let trans_everywhere = attr::requests_inline(&method.attrs[]);
for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) {
let llfn = get_item_val(ccx, method.id);
trans_fn(ccx,
// Here, in this call, which I've written with explicit UFCS
// notation, the set of type parameters will be:
//
- // rcvr_type: .index(&FullRange) <-- nothing declared on the trait itself
+ // rcvr_type: [] <-- nothing declared on the trait itself
// rcvr_self: [Vec<int>] <-- the self type
// rcvr_method: [String] <-- method type parameter
//
//
// Recall that we matched `<Vec<int> as Convert>`. Trait
// resolution will have given us a substitution
- // containing `impl_substs=[[T=int],.index(&FullRange),.index(&FullRange)]` (the type
+ // containing `impl_substs=[[T=int],[],[]]` (the type
// parameters defined on the impl). We combine
// that with the `rcvr_method` from before, which tells us
// the type parameters from the *method*, to yield
- // `callee_substs=[[T=int],.index(&FullRange),[U=String]]`.
+ // `callee_substs=[[T=int],[],[U=String]]`.
let subst::SeparateVecsPerParamSpace {
types: impl_type,
selfs: impl_self,
callee_substs)
}
_ => {
- tcx.sess.bug(format!("static call to invalid vtable: {}",
- vtbl.repr(tcx)).index(&FullRange));
+ tcx.sess.bug(&format!("static call to invalid vtable: {}",
+ vtbl.repr(tcx))[]);
}
}
}
traits::VtableBuiltin(..) |
traits::VtableParam(..) => {
bcx.sess().bug(
- format!("resolved vtable bad vtable {} in trans",
- vtable.repr(bcx.tcx())).index(&FullRange));
+ &format!("resolved vtable bad vtable {} in trans",
+ vtable.repr(bcx.tcx()))[]);
}
}
}
}
traits::VtableParam => {
bcx.sess().bug(
- format!("resolved vtable for {} to bad vtable {} in trans",
+ &format!("resolved vtable for {} to bad vtable {} in trans",
trait_ref.repr(bcx.tcx()),
- vtable.repr(bcx.tcx())).index(&FullRange));
+ vtable.repr(bcx.tcx()))[]);
}
}
});
let components: Vec<_> = head.into_iter().chain(ptrs).collect();
unsafe {
- let tbl = C_struct(ccx, components.index(&FullRange), false);
+ let tbl = C_struct(ccx, &components[], false);
let sym = token::gensym("vtable");
let buf = CString::from_vec(format!("vtable{}", sym.uint()).into_bytes());
let vt_gvar = llvm::LLVMAddGlobal(ccx.llmod(), val_ty(tbl).to_ref(),
hash = format!("h{}", state.result());
ccx.tcx().map.with_path(fn_id.node, |path| {
- exported_name(path, hash.index(&FullRange))
+ exported_name(path, &hash[])
})
};
let mut hash_id = Some(hash_id);
let mut mk_lldecl = |&mut : abi: abi::Abi| {
let lldecl = if abi != abi::Rust {
- foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s.index(&FullRange))
+ foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, &s[])
} else {
- decl_internal_rust_fn(ccx, mono_ty, s.index(&FullRange))
+ decl_internal_rust_fn(ccx, mono_ty, &s[])
};
ccx.monomorphized().borrow_mut().insert(hash_id.take().unwrap(), lldecl);
..
} => {
let d = mk_lldecl(abi);
- let needs_body = setup_lldecl(d, i.attrs.index(&FullRange));
+ let needs_body = setup_lldecl(d, &i.attrs[]);
if needs_body {
if abi != abi::Rust {
foreign::trans_rust_fn_with_foreign_abi(
ccx, &**decl, &**body, &[], d, psubsts, fn_id.node,
- Some(hash.index(&FullRange)));
+ Some(&hash[]));
} else {
trans_fn(ccx, &**decl, &**body, d, psubsts, fn_id.node, &[]);
}
trans_enum_variant(ccx,
parent,
&*v,
- args.index(&FullRange),
+ &args[],
this_tv.disr_val,
psubsts,
d);
match *ii {
ast::MethodImplItem(ref mth) => {
let d = mk_lldecl(abi::Rust);
- let needs_body = setup_lldecl(d, mth.attrs.index(&FullRange));
+ let needs_body = setup_lldecl(d, &mth.attrs[]);
if needs_body {
trans_fn(ccx,
mth.pe_fn_decl(),
match *method {
ast::ProvidedMethod(ref mth) => {
let d = mk_lldecl(abi::Rust);
- let needs_body = setup_lldecl(d, mth.attrs.index(&FullRange));
+ let needs_body = setup_lldecl(d, &mth.attrs[]);
if needs_body {
trans_fn(ccx, mth.pe_fn_decl(), mth.pe_body(), d,
psubsts, mth.id, &[]);
d
}
_ => {
- ccx.sess().bug(format!("can't monomorphize a {:?}",
- map_node).index(&FullRange))
+ ccx.sess().bug(&format!("can't monomorphize a {:?}",
+ map_node)[])
}
}
}
let d = mk_lldecl(abi::Rust);
set_inline_hint(d);
base::trans_tuple_struct(ccx,
- struct_def.fields.index(&FullRange),
+ &struct_def.fields[],
struct_def.ctor_id.expect("ast-mapped tuple struct \
didn't have a ctor id"),
psubsts,
ast_map::NodeBlock(..) |
ast_map::NodePat(..) |
ast_map::NodeLocal(..) => {
- ccx.sess().bug(format!("can't monomorphize a {:?}",
- map_node).index(&FullRange))
+ ccx.sess().bug(&format!("can't monomorphize a {:?}",
+ map_node)[])
}
};
}
pub fn int(ccx: &CrateContext) -> Type {
- match ccx.tcx().sess.target.target.target_word_size.index(&FullRange) {
+ match &ccx.tcx().sess.target.target.target_word_size[] {
"32" => Type::i32(ccx),
"64" => Type::i64(ccx),
tws => panic!("Unsupported target word size for int: {}", tws),
let input_tys = inputs.iter().map(|&arg_ty| type_of_explicit_arg(cx, arg_ty));
atys.extend(input_tys);
- Type::func(atys.index(&FullRange), &lloutputtype)
+ Type::func(&atys[], &lloutputtype)
}
// Given a function type and a count of ty params, construct an llvm type
let llsizingty = match t.sty {
_ if !lltype_is_sized(cx.tcx(), t) => {
- cx.sess().bug(format!("trying to take the sizing type of {}, an unsized type",
- ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange))
+ cx.sess().bug(&format!("trying to take the sizing type of {}, an unsized type",
+ ppaux::ty_to_string(cx.tcx(), t))[])
}
ty::ty_bool => Type::bool(cx),
}
ty::ty_projection(..) | ty::ty_infer(..) | ty::ty_param(..) | ty::ty_err(..) => {
- cx.sess().bug(format!("fictitious type {} in sizing_type_of()",
- ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange))
+ cx.sess().bug(&format!("fictitious type {} in sizing_type_of()",
+ ppaux::ty_to_string(cx.tcx(), t))[])
}
ty::ty_vec(_, None) | ty::ty_trait(..) | ty::ty_str => panic!("unreachable")
};
let repr = adt::represent_type(cx, t);
let tps = substs.types.get_slice(subst::TypeSpace);
let name = llvm_type_name(cx, an_enum, did, tps);
- adt::incomplete_type_of(cx, &*repr, name.index(&FullRange))
+ adt::incomplete_type_of(cx, &*repr, &name[])
}
ty::ty_unboxed_closure(did, _, ref substs) => {
// Only create the named struct, but don't fill it in. We
// contents of the VecPerParamSpace to to construct the llvm
// name
let name = llvm_type_name(cx, an_unboxed_closure, did, substs.types.as_slice());
- adt::incomplete_type_of(cx, &*repr, name.index(&FullRange))
+ adt::incomplete_type_of(cx, &*repr, &name[])
}
ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) | ty::ty_ptr(ty::mt{ty, ..}) => {
let repr = adt::represent_type(cx, t);
let tps = substs.types.get_slice(subst::TypeSpace);
let name = llvm_type_name(cx, a_struct, did, tps);
- adt::incomplete_type_of(cx, &*repr, name.index(&FullRange))
+ adt::incomplete_type_of(cx, &*repr, &name[])
}
}
Type::struct_(cx, &[p_ty, type_of_unsize_info(cx, t)], false)
}
ty::ty_trait(..) => Type::opaque_trait(cx),
- _ => cx.sess().bug(format!("ty_open with sized type: {}",
- ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange))
+ _ => cx.sess().bug(&format!("ty_open with sized type: {}",
+ ppaux::ty_to_string(cx.tcx(), t))[])
},
ty::ty_infer(..) => cx.sess().bug("type_of with ty_infer"),
format!("`{}`", name)
};
- m.push_str(if n == 1 {
+ m.push_str(&(if n == 1 {
help_name
} else {
format!("one of {}'s {} elided lifetimes", help_name, n)
- }.index(&FullRange));
+ })[]);
if len == 2 && i == 0 {
m.push_str(" or ");
"expected"
};
this.tcx().sess.span_fatal(span,
- format!("wrong number of type arguments: {} {}, found {}",
+ &format!("wrong number of type arguments: {} {}, found {}",
expected,
required_ty_param_count,
- supplied_ty_param_count).index(&FullRange));
+ supplied_ty_param_count)[]);
} else if supplied_ty_param_count > formal_ty_param_count {
let expected = if required_ty_param_count < formal_ty_param_count {
"expected at most"
"expected"
};
this.tcx().sess.span_fatal(span,
- format!("wrong number of type arguments: {} {}, found {}",
+ &format!("wrong number of type arguments: {} {}, found {}",
expected,
formal_ty_param_count,
- supplied_ty_param_count).index(&FullRange));
+ supplied_ty_param_count)[]);
}
let mut substs = Substs::new_type(types, regions);
}
}
- for param in ty_param_defs.index(&(supplied_ty_param_count..)).iter() {
+ for param in ty_param_defs[supplied_ty_param_count..].iter() {
match param.default {
Some(default) => {
// This is a default type parameter.
_ => {
this.tcx().sess.span_fatal(
ast_trait_ref.path.span,
- format!("`{}` is not a trait",
- ast_trait_ref.path.user_string(this.tcx())).index(&FullRange));
+ &format!("`{}` is not a trait",
+ ast_trait_ref.path.user_string(this.tcx()))[]);
}
}
}
this.tcx()
.sess
.span_bug(ast_ty.span,
- format!("unbound path {}",
- path.repr(this.tcx())).index(&FullRange))
+ &format!("unbound path {}",
+ path.repr(this.tcx()))[])
}
Some(&d) => d
};
_ => {
this.tcx().sess.span_bug(
path.span,
- format!("converting `Box` to `{}`",
- ty.repr(this.tcx())).index(&FullRange));
+ &format!("converting `Box` to `{}`",
+ ty.repr(this.tcx()))[]);
}
}
}
ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None)
}
ast::TyObjectSum(ref ty, ref bounds) => {
- match ast_ty_to_trait_ref(this, rscope, &**ty, bounds.index(&FullRange)) {
+ match ast_ty_to_trait_ref(this, rscope, &**ty, &bounds[]) {
Ok((trait_ref, projection_bounds)) => {
trait_ref_to_object_type(this,
rscope,
ast_ty.span,
trait_ref,
projection_bounds,
- bounds.index(&FullRange))
+ &bounds[])
}
Err(ErrorReported) => {
this.tcx().types.err
ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(bare_fn))
}
ast::TyPolyTraitRef(ref bounds) => {
- conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds.index(&FullRange))
+ conv_ty_poly_trait_ref(this, rscope, ast_ty.span, &bounds[])
}
ast::TyPath(ref path, id) => {
let a_def = match tcx.def_map.borrow().get(&id) {
None => {
tcx.sess
.span_bug(ast_ty.span,
- format!("unbound path {}",
- path.repr(tcx)).index(&FullRange))
+ &format!("unbound path {}",
+ path.repr(tcx))[])
}
Some(&d) => d
};
}
def::DefMod(id) => {
tcx.sess.span_fatal(ast_ty.span,
- format!("found module name used as a type: {}",
- tcx.map.node_to_string(id.node)).index(&FullRange));
+ &format!("found module name used as a type: {}",
+ tcx.map.node_to_string(id.node))[]);
}
def::DefPrimTy(_) => {
panic!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call");
let path_str = tcx.map.path_to_string(
tcx.map.get_parent(trait_type_id.node));
tcx.sess.span_err(ast_ty.span,
- format!("ambiguous associated \
+ &format!("ambiguous associated \
type; specify the type \
using the syntax `<Type \
as {}>::{}`",
.last()
.unwrap()
.identifier)
- .get()).index(&FullRange));
+ .get())[]);
this.tcx().types.err
}
def::DefAssociatedPath(provenance, assoc_ident) => {
}
_ => {
tcx.sess.span_fatal(ast_ty.span,
- format!("found value name used \
+ &format!("found value name used \
as a type: {:?}",
- a_def).index(&FullRange));
+ a_def)[]);
}
}
}
Err(ref r) => {
tcx.sess.span_fatal(
ast_ty.span,
- format!("expected constant expr for array \
+ &format!("expected constant expr for array \
length: {}",
- *r).index(&FullRange));
+ *r)[]);
}
}
}
let input_params = if self_ty.is_some() {
decl.inputs.slice_from(1)
} else {
- decl.inputs.index(&FullRange)
+ &decl.inputs[]
};
let input_tys = input_params.iter().map(|a| ty_of_arg(this, &rb, a, None));
let input_pats: Vec<String> = input_params.iter()
ast_bounds: &[ast::TyParamBound])
-> Ty<'tcx>
{
- let mut partitioned_bounds = partition_bounds(this.tcx(), span, ast_bounds.index(&FullRange));
+ let mut partitioned_bounds = partition_bounds(this.tcx(), span, &ast_bounds[]);
let mut projection_bounds = Vec::new();
let main_trait_bound = if !partitioned_bounds.trait_bounds.is_empty() {
let b = &trait_bounds[0];
this.tcx().sess.span_err(
b.trait_ref.path.span,
- format!("only the builtin traits can be used \
- as closure or object bounds").index(&FullRange));
+ &format!("only the builtin traits can be used \
+ as closure or object bounds")[]);
}
let region_bound = compute_region_bound(this,
if derived_region_bounds.slice_from(1).iter().any(|r1| r != *r1) {
tcx.sess.span_err(
span,
- format!("ambiguous lifetime bound, \
- explicit lifetime bound required").index(&FullRange));
+ &format!("ambiguous lifetime bound, \
+ explicit lifetime bound required")[]);
}
return Some(r);
}
None => {
this.tcx().sess.span_err(
span,
- format!("explicit lifetime bound required").index(&FullRange));
+ &format!("explicit lifetime bound required")[]);
ty::ReStatic
}
}
None => {
self.tcx().sess.span_bug(
self.span,
- format!("self-type `{}` for ObjectPick never dereferenced to an object",
- self_ty.repr(self.tcx())).index(&FullRange))
+ &format!("self-type `{}` for ObjectPick never dereferenced to an object",
+ self_ty.repr(self.tcx()))[])
}
}
}
Err(_) => {
self.tcx().sess.span_bug(
self.span,
- format!(
+ &format!(
"{} was a subtype of {} but now is not?",
self_ty.repr(self.tcx()),
- method_self_ty.repr(self.tcx())).index(&FullRange));
+ method_self_ty.repr(self.tcx()))[]);
}
}
}
_ => {
fcx.tcx().sess.span_bug(
span,
- format!(
+ &format!(
"trait method is &self but first arg is: {}",
- transformed_self_ty.repr(fcx.tcx())).index(&FullRange));
+ transformed_self_ty.repr(fcx.tcx()))[]);
}
}
}
_ => {
fcx.tcx().sess.span_bug(
span,
- format!(
+ &format!(
"unexpected explicit self type in operator method: {:?}",
- method_ty.explicit_self).index(&FullRange));
+ method_ty.explicit_self)[]);
}
}
}
// If the method has the name of a field, give a help note
if is_field {
cx.sess.span_note(span,
- format!("use `(s.{0})(...)` if you meant to call the \
- function stored in the `{0}` field", method_ustring).index(&FullRange));
+ &format!("use `(s.{0})(...)` if you meant to call the \
+ function stored in the `{0}` field", method_ustring)[]);
}
if static_sources.len() > 0 {
None => {
self.tcx().sess.span_bug(
self.span,
- format!("No entry for unboxed closure: {}",
- closure_def_id.repr(self.tcx())).index(&FullRange));
+ &format!("No entry for unboxed closure: {}",
+ closure_def_id.repr(self.tcx()))[]);
}
};
debug!("pick_method(self_ty={})", self.infcx().ty_to_string(self_ty));
debug!("searching inherent candidates");
- match self.consider_candidates(self_ty, self.inherent_candidates.index(&FullRange)) {
+ match self.consider_candidates(self_ty, &self.inherent_candidates[]) {
None => {}
Some(pick) => {
return Some(pick);
}
debug!("searching extension candidates");
- self.consider_candidates(self_ty, self.extension_candidates.index(&FullRange))
+ self.consider_candidates(self_ty, &self.extension_candidates[])
}
fn consider_candidates(&self,
debug!("applicable_candidates: {}", applicable_candidates.repr(self.tcx()));
if applicable_candidates.len() > 1 {
- match self.collapse_candidates_to_trait_pick(applicable_candidates.index(&FullRange)) {
+ match self.collapse_candidates_to_trait_pick(&applicable_candidates[]) {
Some(pick) => { return Some(Ok(pick)); }
None => { }
}
Some(data) => data,
None => return None,
};
- if probes.index(&(1..)).iter().any(|p| p.to_trait_data() != Some(trait_data)) {
+ if probes[1..].iter().any(|p| p.to_trait_data() != Some(trait_data)) {
return None;
}
let tcx = ccx.tcx;
let err_count_on_creation = tcx.sess.err_count();
- let arg_tys = fn_sig.inputs.index(&FullRange);
+ let arg_tys = &fn_sig.inputs[];
let ret_ty = fn_sig.output;
debug!("check_fn(arg_tys={}, ret_ty={}, fn_id={})",
ast::ItemEnum(ref enum_definition, _) => {
check_enum_variants(ccx,
it.span,
- enum_definition.variants.index(&FullRange),
+ &enum_definition.variants[],
it.id);
}
ast::ItemFn(ref decl, _, _, _, ref body) => {
(&ty::StaticExplicitSelfCategory, _) => {
tcx.sess.span_err(
impl_m_span,
- format!("method `{}` has a `{}` declaration in the impl, \
+ &format!("method `{}` has a `{}` declaration in the impl, \
but not in the trait",
token::get_name(trait_m.name),
ppaux::explicit_self_category_to_str(
- &impl_m.explicit_self)).index(&FullRange));
+ &impl_m.explicit_self))[]);
return;
}
(_, &ty::StaticExplicitSelfCategory) => {
tcx.sess.span_err(
impl_m_span,
- format!("method `{}` has a `{}` declaration in the trait, \
+ &format!("method `{}` has a `{}` declaration in the trait, \
but not in the impl",
token::get_name(trait_m.name),
ppaux::explicit_self_category_to_str(
- &trait_m.explicit_self)).index(&FullRange));
+ &trait_m.explicit_self))[]);
return;
}
_ => {
if trait_params.len() != impl_params.len() {
tcx.sess.span_err(
span,
- format!("lifetime parameters or bounds on method `{}` do \
+ &format!("lifetime parameters or bounds on method `{}` do \
not match the trait declaration",
- token::get_name(impl_m.name)).index(&FullRange));
+ token::get_name(impl_m.name))[]);
return false;
}
let err = if missing.len() != 0 || extra.len() != 0 {
tcx.sess.span_err(
span,
- format!(
+ &format!(
"the lifetime parameter `{}` declared in the impl \
has a distinct set of bounds \
from its counterpart `{}` \
declared in the trait",
impl_param.name.user_string(tcx),
- trait_param.name.user_string(tcx)).index(&FullRange));
+ trait_param.name.user_string(tcx))[]);
true
} else {
false
if missing.len() != 0 {
tcx.sess.span_note(
span,
- format!("the impl is missing the following bounds: `{}`",
- missing.user_string(tcx)).index(&FullRange));
+ &format!("the impl is missing the following bounds: `{}`",
+ missing.user_string(tcx))[]);
}
if extra.len() != 0 {
tcx.sess.span_note(
span,
- format!("the impl has the following extra bounds: `{}`",
- extra.user_string(tcx)).index(&FullRange));
+ &format!("the impl has the following extra bounds: `{}`",
+ extra.user_string(tcx))[]);
}
if err {
None => {
self.tcx().sess.span_bug(
span,
- format!("no type for local variable {}",
- nid).index(&FullRange));
+ &format!("no type for local variable {}",
+ nid)[]);
}
}
}
match self.inh.node_types.borrow().get(&ex.id) {
Some(&t) => t,
None => {
- self.tcx().sess.bug(format!("no type for expr in fcx {}",
- self.tag()).index(&FullRange));
+ self.tcx().sess.bug(&format!("no type for expr in fcx {}",
+ self.tag())[]);
}
}
}
Some(&t) => t,
None => {
self.tcx().sess.bug(
- format!("no type for node {}: {} in fcx {}",
+ &format!("no type for node {}: {} in fcx {}",
id, self.tcx().map.node_to_string(id),
- self.tag()).index(&FullRange));
+ self.tag())[]);
}
}
}
Ok(trait_did) => trait_did,
Err(ref err_string) => {
fcx.tcx().sess.span_err(iterator_expr.span,
- err_string.index(&FullRange));
+ &err_string[]);
return fcx.tcx().types.err
}
};
if !ty::type_is_error(true_expr_type) {
let ty_string = fcx.infcx().ty_to_string(true_expr_type);
fcx.tcx().sess.span_err(iterator_expr.span,
- format!("`for` loop expression has type `{}` which does \
+ &format!("`for` loop expression has type `{}` which does \
not implement the `Iterator` trait; \
maybe try .iter()",
- ty_string).index(&FullRange));
+ ty_string)[]);
}
fcx.tcx().types.err
}
}
_ => {
fcx.tcx().sess.span_err(iterator_expr.span,
- format!("`next` method of the `Iterator` \
+ &format!("`next` method of the `Iterator` \
trait has an unexpected type `{}`",
fcx.infcx().ty_to_string(return_type))
- .index(&FullRange));
+ []);
fcx.tcx().types.err
}
}
check_argument_types(fcx,
sp,
- err_inputs.index(&FullRange),
+ &err_inputs[],
args_no_rcvr,
autoref_args,
false,
};
// Call the generic checker.
- let args: Vec<_> = args.index(&(1..)).iter().map(|x| x).collect();
+ let args: Vec<_> = args[1..].iter().map(|x| x).collect();
let ret_ty = check_method_argument_types(fcx,
method_name.span,
fn_ty,
ty::ty_struct(base_id, substs) => {
debug!("struct named {}", ppaux::ty_to_string(tcx, base_t));
let fields = ty::lookup_struct_fields(tcx, base_id);
- lookup_field_ty(tcx, base_id, fields.index(&FullRange),
+ lookup_field_ty(tcx, base_id, &fields[],
field.node.name, &(*substs))
}
_ => None
if tuple_like {
debug!("tuple struct named {}", ppaux::ty_to_string(tcx, base_t));
let fields = ty::lookup_struct_fields(tcx, base_id);
- lookup_tup_field_ty(tcx, base_id, fields.index(&FullRange),
+ lookup_tup_field_ty(tcx, base_id, &fields[],
idx.node, &(*substs))
} else {
None
class_id,
id,
fcx.ccx.tcx.mk_substs(struct_substs),
- class_fields.index(&FullRange),
+ &class_fields[],
fields,
base_expr.is_none(),
None);
variant_id,
id,
fcx.ccx.tcx.mk_substs(substitutions),
- variant_fields.index(&FullRange),
+ &variant_fields[],
fields,
true,
Some(enum_id));
let expected = expected.only_has_type();
let flds = expected.map_to_option(fcx, |ty| {
match ty.sty {
- ty::ty_tup(ref flds) => Some(flds.index(&FullRange)),
+ ty::ty_tup(ref flds) => Some(&flds[]),
_ => None
}
});
let struct_id = match def {
Some(def::DefVariant(enum_id, variant_id, true)) => {
check_struct_enum_variant(fcx, id, expr.span, enum_id,
- variant_id, fields.index(&FullRange));
+ variant_id, &fields[]);
enum_id
}
Some(def::DefTrait(def_id)) => {
pprust::path_to_string(path));
check_struct_fields_on_error(fcx,
id,
- fields.index(&FullRange),
+ &fields[],
base_expr);
def_id
},
id,
expr.span,
struct_did,
- fields.index(&FullRange),
+ &fields[],
base_expr.as_ref().map(|e| &**e));
}
_ => {
pprust::path_to_string(path));
check_struct_fields_on_error(fcx,
id,
- fields.index(&FullRange),
+ &fields[],
base_expr);
}
}
fcx.tcx()
.sess
.span_err(path.span,
- format!("structure constructor specifies a \
+ &format!("structure constructor specifies a \
structure of type `{}`, but this \
structure has type `{}`: {}",
fcx.infcx()
fcx.infcx()
.ty_to_string(
actual_structure_type),
- type_error_description).index(&FullRange));
+ type_error_description)[]);
ty::note_and_explain_type_err(tcx, &type_error);
}
}
}
let hint = *ty::lookup_repr_hints(ccx.tcx, ast::DefId { krate: ast::LOCAL_CRATE, node: id })
- .index(&FullRange).get(0).unwrap_or(&attr::ReprAny);
+ [].get(0).unwrap_or(&attr::ReprAny);
if hint != attr::ReprAny && vs.len() <= 1 {
if vs.len() == 1 {
"get_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
Ok(t) => t,
- Err(s) => { tcx.sess.span_fatal(it.span, s.index(&FullRange)); }
+ Err(s) => { tcx.sess.span_fatal(it.span, &s[]); }
};
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
ty: tydesc_ty,
ty::mk_struct(ccx.tcx, did,
ccx.tcx.mk_substs(subst::Substs::empty()))),
Err(msg) => {
- tcx.sess.span_fatal(it.span, msg.index(&FullRange));
+ tcx.sess.span_fatal(it.span, &msg[]);
}
}
},
}
}
_ => {
- tcx.sess.bug(format!("unexpected def in region_of_def: {:?}",
- def).index(&FullRange))
+ tcx.sess.bug(&format!("unexpected def in region_of_def: {:?}",
+ def)[])
}
}
}
Some(f) => f,
None => {
self.tcx().sess.bug(
- format!("No fn-sig entry for id={}", id).index(&FullRange));
+ &format!("No fn-sig entry for id={}", id)[]);
}
};
let len = self.region_bound_pairs.len();
- self.relate_free_regions(fn_sig.index(&FullRange), body.id);
- link_fn_args(self, CodeExtent::from_node_id(body.id), fn_decl.inputs.index(&FullRange));
+ self.relate_free_regions(&fn_sig[], body.id);
+ link_fn_args(self, CodeExtent::from_node_id(body.id), &fn_decl.inputs[]);
self.visit_block(body);
self.visit_region_obligations(body.id);
self.region_bound_pairs.truncate(len);
}
ast::ExprMatch(ref discr, ref arms, _) => {
- link_match(rcx, &**discr, arms.index(&FullRange));
+ link_match(rcx, &**discr, &arms[]);
visit::walk_expr(rcx, expr);
}
let (m, r) = match self_ty.sty {
ty::ty_rptr(r, ref m) => (m.mutbl, r),
_ => rcx.tcx().sess.span_bug(deref_expr.span,
- format!("bad overloaded deref type {}",
- method.ty.repr(rcx.tcx())).index(&FullRange))
+ &format!("bad overloaded deref type {}",
+ method.ty.repr(rcx.tcx()))[])
};
{
let mc = mc::MemCategorizationContext::new(rcx.fcx);
None => {
rcx.tcx().sess.span_bug(
span,
- format!("Illegal upvar id: {}",
+ &format!("Illegal upvar id: {}",
upvar_id.repr(
- rcx.tcx())).index(&FullRange));
+ rcx.tcx()))[]);
}
}
}
ty::ty_open(_) => {
self.tcx.sess.bug(
- format!("Unexpected type encountered while doing wf check: {}",
- ty.repr(self.tcx)).index(&FullRange));
+ &format!("Unexpected type encountered while doing wf check: {}",
+ ty.repr(self.tcx))[]);
}
}
}
(_, &ty::ty_uniq(..)) => {
fcx.ccx.tcx.sess.span_err(
source_expr.span,
- format!("can only cast an boxed pointer \
+ &format!("can only cast an boxed pointer \
to a boxed object, not a {}",
- ty::ty_sort_string(fcx.tcx(), source_ty)).index(&FullRange));
+ ty::ty_sort_string(fcx.tcx(), source_ty))[]);
}
(_, &ty::ty_rptr(..)) => {
fcx.ccx.tcx.sess.span_err(
source_expr.span,
- format!("can only cast a &-pointer \
+ &format!("can only cast a &-pointer \
to an &-object, not a {}",
- ty::ty_sort_string(fcx.tcx(), source_ty)).index(&FullRange));
+ ty::ty_sort_string(fcx.tcx(), source_ty))[]);
}
_ => {
// that the user can type
inference_context.tcx.sess.span_bug(
span,
- format!("coherence encountered unexpected type searching for base type: {}",
- ty.repr(inference_context.tcx)).index(&FullRange));
+ &format!("coherence encountered unexpected type searching for base type: {}",
+ ty.repr(inference_context.tcx))[]);
}
}
}
Err(ty::FieldDoesNotImplementCopy(name)) => {
tcx.sess
.span_err(span,
- format!("the trait `Copy` may not be \
+ &format!("the trait `Copy` may not be \
implemented for this type; field \
`{}` does not implement `Copy`",
- token::get_name(name)).index(&FullRange))
+ token::get_name(name))[])
}
Err(ty::VariantDoesNotImplementCopy(name)) => {
tcx.sess
.span_err(span,
- format!("the trait `Copy` may not be \
+ &format!("the trait `Copy` may not be \
implemented for this type; variant \
`{}` does not implement `Copy`",
- token::get_name(name)).index(&FullRange))
+ token::get_name(name))[])
}
Err(ty::TypeIsStructural) => {
tcx.sess
ast::TupleVariantKind(ref args) if args.len() > 0 => {
let rs = ExplicitRscope;
let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, &*va.ty)).collect();
- ty::mk_ctor_fn(tcx, variant_def_id, input_tys.index(&FullRange), enum_ty)
+ ty::mk_ctor_fn(tcx, variant_def_id, &input_tys[], enum_ty)
}
ast::TupleVariantKind(_) => {
ccx,
trait_id,
&trait_def.generics,
- trait_items.index(&FullRange),
+ &trait_items[],
&m.id,
&m.ident.name,
&m.explicit_self,
ccx,
trait_id,
&trait_def.generics,
- trait_items.index(&FullRange),
+ &trait_items[],
&m.id,
&m.pe_ident().name,
m.pe_explicit_self(),
local_def(field.node.id)].ty).collect();
let ctor_fn_ty = ty::mk_ctor_fn(tcx,
local_def(ctor_id),
- inputs.index(&FullRange),
+ &inputs[],
selfty);
write_ty_to_tcx(tcx, ctor_id, ctor_fn_ty);
tcx.tcache.borrow_mut().insert(local_def(ctor_id),
match ccx.tcx.map.get(trait_id.node) {
ast_map::NodeItem(item) => trait_def_of_item(ccx, &*item),
_ => {
- ccx.tcx.sess.bug(format!("get_trait_def({}): not an item",
- trait_id.node).index(&FullRange))
+ ccx.tcx.sess.bug(&format!("get_trait_def({}): not an item",
+ trait_id.node)[])
}
}
}
ref s => {
tcx.sess.span_bug(
it.span,
- format!("trait_def_of_item invoked on {:?}", s).index(&FullRange));
+ &format!("trait_def_of_item invoked on {:?}", s)[]);
}
};
-> ty::Generics<'tcx> {
ty_generics(ccx,
subst::TypeSpace,
- generics.lifetimes.index(&FullRange),
- generics.ty_params.index(&FullRange),
+ &generics.lifetimes[],
+ &generics.ty_params[],
ty::Generics::empty(),
&generics.where_clause)
}
let mut generics =
ty_generics(ccx,
subst::TypeSpace,
- ast_generics.lifetimes.index(&FullRange),
- ast_generics.ty_params.index(&FullRange),
+ &ast_generics.lifetimes[],
+ &ast_generics.ty_params[],
ty::Generics::empty(),
&ast_generics.where_clause);
let early_lifetimes = resolve_lifetime::early_bound_lifetimes(generics);
ty_generics(ccx,
subst::FnSpace,
- early_lifetimes.index(&FullRange),
- generics.ty_params.index(&FullRange),
+ &early_lifetimes[],
+ &generics.ty_params[],
base_generics,
&generics.where_clause)
}
let param_ty = ty::ParamTy::new(space, index, param.ident.name);
let bounds = compute_bounds(ccx,
param_ty.to_ty(ccx.tcx),
- param.bounds.index(&FullRange),
+                                    &param.bounds[],
SizedByDefault::Yes,
param.span);
let default = match param.default {
if !param_bounds.builtin_bounds.contains(&ty::BoundSized) {
ty::each_bound_trait_and_supertraits(
tcx,
- param_bounds.trait_bounds.index(&FullRange),
+            &param_bounds.trait_bounds[],
|trait_ref| {
let trait_def = ty::lookup_trait_def(tcx, trait_ref.def_id());
if trait_def.bounds.builtin_bounds.contains(&ty::BoundSized) {
Ok(_) => true,
Err(ref terr) => {
tcx.sess.span_err(span,
- format!("{}: {}",
+ &format!("{}: {}",
msg(),
ty::type_err_to_str(tcx,
- terr)).index(&FullRange));
+ terr))[]);
ty::note_and_explain_type_err(tcx, terr);
false
}
}
_ => {
tcx.sess.span_bug(main_span,
- format!("main has a non-function type: found \
+ &format!("main has a non-function type: found \
`{}`",
ppaux::ty_to_string(tcx,
- main_t)).index(&FullRange));
+ main_t))[]);
}
}
}
}
_ => {
tcx.sess.span_bug(start_span,
- format!("start has a non-function type: found \
+ &format!("start has a non-function type: found \
`{}`",
- ppaux::ty_to_string(tcx, start_t)).index(&FullRange));
+ ppaux::ty_to_string(tcx, start_t))[]);
}
}
}
match self.terms_cx.inferred_map.get(¶m_id) {
Some(&index) => index,
None => {
- self.tcx().sess.bug(format!(
+ self.tcx().sess.bug(&format!(
"no inferred index entry for {}",
- self.tcx().map.node_to_string(param_id)).index(&FullRange));
+ self.tcx().map.node_to_string(param_id))[]);
}
}
}
ty::ty_infer(..) | ty::ty_err => {
self.tcx().sess.bug(
- format!("unexpected type encountered in \
+ &format!("unexpected type encountered in \
variance inference: {}",
- ty.repr(self.tcx())).index(&FullRange));
+ ty.repr(self.tcx()))[]);
}
}
}
// regions when visiting member types or method types.
self.tcx()
.sess
- .bug(format!("unexpected region encountered in variance \
+ .bug(&format!("unexpected region encountered in variance \
inference: {}",
- region.repr(self.tcx())).index(&FullRange));
+ region.repr(self.tcx()))[]);
}
}
}
// attribute and report an error with various results if found.
if ty::has_attr(tcx, item_def_id, "rustc_variance") {
let found = item_variances.repr(tcx);
- tcx.sess.span_err(tcx.map.span(item_id), found.index(&FullRange));
+ tcx.sess.span_err(tcx.map.span(item_id), &found[]);
}
let newly_added = tcx.item_variance_map.borrow_mut()
let all_inputs = &self.pe_fn_decl().inputs;
let inputs = match self.pe_explicit_self().node {
ast::SelfStatic => all_inputs.as_slice(),
- _ => all_inputs.index(&(1..))
+ _ => &all_inputs[1..]
};
let decl = FnDecl {
inputs: Arguments {
fn clean(&self, cx: &DocContext) -> Item {
let inputs = match self.explicit_self.node {
ast::SelfStatic => self.decl.inputs.as_slice(),
- _ => self.decl.inputs.index(&(1..))
+ _ => &self.decl.inputs[1..]
};
let decl = FnDecl {
inputs: Arguments {
self.fty.sig.clone()),
s => {
let sig = ty::Binder(ty::FnSig {
- inputs: self.fty.sig.0.inputs.index(&(1..)).to_vec(),
+ inputs: self.fty.sig.0.inputs[1..].to_vec(),
..self.fty.sig.0.clone()
});
let s = match s {
match rel_root {
Some(root) => {
let mut root = String::from_str(root.as_slice());
- for seg in path.segments.index(&(0..amt)).iter() {
+ for seg in path.segments[0..amt].iter() {
if "super" == seg.name ||
"self" == seg.name {
try!(write!(w, "{}::", seg.name));
}
}
None => {
- for seg in path.segments.index(&(0..amt)).iter() {
+ for seg in path.segments[0..amt].iter() {
try!(write!(w, "{}::", seg.name));
}
}
class,
id,
&mut out).unwrap();
- String::from_utf8_lossy(out.index(&FullRange)).into_owned()
+ String::from_utf8_lossy(&out[]).into_owned()
}
/// Exhausts the `lexer` writing the output into `out`.
};
if start < i {
- try!(wr.write_str(v.index(&(start..i))));
+ try!(wr.write_str(&v[start..i]));
}
try!(wr.write_str(escaped));
}
if start != v.len() {
- try!(wr.write_str(v.index(&(start..))));
+ try!(wr.write_str(&v[start..]));
}
wr.write_str("\"")
fn escape_char(writer: &mut fmt::Writer, v: char) -> fmt::Result {
let mut buf = [0; 4];
let n = v.encode_utf8(&mut buf).unwrap();
- let buf = unsafe { str::from_utf8_unchecked(buf.index(&(0..n))) };
+ let buf = unsafe { str::from_utf8_unchecked(&buf[0..n]) };
escape_str(writer, buf)
}
}
if n > 0 {
- wr.write_str(BUF.index(&(0..n)))
+ wr.write_str(&BUF[0..n])
} else {
Ok(())
}
let mut check_encoder = Encoder::new(&mut buf);
try!(f(transmute(&mut check_encoder)));
}
- let out = str::from_utf8(buf.index(&FullRange)).unwrap();
+ let out = str::from_utf8(&buf[]).unwrap();
let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"';
if needs_wrapping { try!(write!(self.writer, "\"")); }
try!(f(self));
let mut check_encoder = PrettyEncoder::new(&mut buf);
try!(f(transmute(&mut check_encoder)));
}
- let out = str::from_utf8(buf.index(&FullRange)).unwrap();
+ let out = str::from_utf8(&buf[]).unwrap();
let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"';
if needs_wrapping { try!(write!(self.writer, "\"")); }
try!(f(self));
/// Returns None otherwise.
pub fn as_string<'a>(&'a self) -> Option<&'a str> {
match *self {
- Json::String(ref s) => Some(s.index(&FullRange)),
+ Json::String(ref s) => Some(&s[]),
_ => None
}
}
fn index<'a>(&'a self, idx: &uint) -> &'a Json {
match self {
- &Json::Array(ref v) => v.index(idx),
+ &Json::Array(ref v) => &v[*idx],
_ => panic!("can only index Json with uint if it is an array")
}
}
InternalIndex(i) => StackElement::Index(i),
InternalKey(start, size) => {
StackElement::Key(str::from_utf8(
- self.str_buffer.index(&((start as uint) .. (start as uint + size as uint))))
+ &self.str_buffer[(start as uint) .. (start as uint + size as uint)])
.unwrap())
}
}
Some(&InternalIndex(i)) => Some(StackElement::Index(i)),
Some(&InternalKey(start, size)) => {
Some(StackElement::Key(str::from_utf8(
- self.str_buffer.index(&((start as uint) .. (start+size) as uint))
+ &self.str_buffer[(start as uint) .. (start+size) as uint]
).unwrap()))
}
}
return Err(ExpectedError("String or Object".to_string(), format!("{}", json)))
}
};
- let idx = match names.iter().position(|n| *n == name.index(&FullRange)) {
+ let idx = match names.iter().position(|n| *n == &name[]) {
Some(idx) => idx,
None => return Err(UnknownVariantError(name))
};
use std::{i64, u64, f32, f64, io};
use std::collections::BTreeMap;
use std::num::Float;
- use std::ops::Index;
use std::string;
#[derive(RustcDecodable, Eq, PartialEq, Show)]
hm.insert(1, true);
let mut mem_buf = Vec::new();
write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
- let json_str = from_utf8(mem_buf.index(&FullRange)).unwrap();
+ let json_str = from_utf8(&mem_buf[]).unwrap();
match from_str(json_str) {
Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
_ => {} // it parsed and we are good to go
hm.insert(1, true);
let mut mem_buf = Vec::new();
write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
- let json_str = from_utf8(mem_buf.index(&FullRange)).unwrap();
+ let json_str = from_utf8(&mem_buf[]).unwrap();
match from_str(json_str) {
Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
_ => {} // it parsed and we are good to go
write!(&mut writer, "{}",
super::as_pretty_json(&json).indent(i)).unwrap();
- let printed = from_utf8(writer.index(&FullRange)).unwrap();
+ let printed = from_utf8(&writer[]).unwrap();
// Check for indents at each line
let lines: Vec<&str> = printed.lines().collect();
impl Encodable for String {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- s.emit_str(self.index(&FullRange))
+ s.emit_str(&self[])
}
}
let msg = match obj.downcast_ref::<&'static str>() {
Some(s) => *s,
None => match obj.downcast_ref::<String>() {
- Some(s) => s.index(&FullRange),
+ Some(s) => &s[],
None => "Box<Any>",
}
};
use cmp;
use io::{Reader, Writer, Stream, Buffer, DEFAULT_BUF_SIZE, IoResult};
use iter::{IteratorExt, ExactSizeIterator};
-use ops::{Drop, Index};
+use ops::Drop;
use option::Option;
use option::Option::{Some, None};
use result::Result::Ok;
self.cap = try!(self.inner.read(self.buf.as_mut_slice()));
self.pos = 0;
}
- Ok(self.buf.index(&(self.pos..self.cap)))
+ Ok(&self.buf[self.pos..self.cap])
}
fn consume(&mut self, amt: uint) {
let nread = {
let available = try!(self.fill_buf());
let nread = cmp::min(available.len(), buf.len());
- slice::bytes::copy_memory(buf, available.index(&(0..nread)));
+ slice::bytes::copy_memory(buf, &available[0..nread]);
nread
};
self.pos += nread;
fn flush_buf(&mut self) -> IoResult<()> {
if self.pos != 0 {
- let ret = self.inner.as_mut().unwrap().write(self.buf.index(&(0..self.pos)));
+ let ret = self.inner.as_mut().unwrap().write(&self.buf[0..self.pos]);
self.pos = 0;
ret
} else {
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
match buf.iter().rposition(|&b| b == b'\n') {
Some(i) => {
- try!(self.inner.write(buf.index(&(0..(i + 1)))));
+ try!(self.inner.write(&buf[0..(i + 1)]));
try!(self.inner.flush());
- try!(self.inner.write(buf.index(&((i + 1)..))));
+ try!(self.inner.write(&buf[(i + 1)..]));
Ok(())
}
None => self.inner.write(buf),
assert_eq!(a, &w.get_ref()[]);
let w = w.into_inner();
let a: &[_] = &[0, 1];
- assert_eq!(a, w.index(&FullRange));
+ assert_eq!(a, &w[]);
}
// This is just here to make sure that we don't infinite loop in the
#[test]
fn read_char_buffered() {
let buf = [195u8, 159u8];
- let mut reader = BufferedReader::with_capacity(1, buf.index(&FullRange));
+ let mut reader = BufferedReader::with_capacity(1, &buf[]);
assert_eq!(reader.read_char(), Ok('ß'));
}
#[test]
fn test_chars() {
let buf = [195u8, 159u8, b'a'];
- let mut reader = BufferedReader::with_capacity(1, buf.index(&FullRange));
+ let mut reader = BufferedReader::with_capacity(1, &buf[]);
let mut it = reader.chars();
assert_eq!(it.next(), Some(Ok('ß')));
assert_eq!(it.next(), Some(Ok('a')));
use sync::mpsc::{Sender, Receiver};
use io;
use option::Option::{None, Some};
-use ops::Index;
use result::Result::{Ok, Err};
use slice::{bytes, SliceExt};
use super::{Buffer, Reader, Writer, IoResult};
Some(src) => {
let dst = buf.slice_from_mut(num_read);
let count = cmp::min(src.len(), dst.len());
- bytes::copy_memory(dst, src.index(&(0..count)));
+ bytes::copy_memory(dst, &src[0..count]);
count
},
None => 0,
let mut read_buf = [0; 1028];
let read_str = match check!(read_stream.read(&mut read_buf)) {
-1|0 => panic!("shouldn't happen"),
- n => str::from_utf8(read_buf.index(&(0..n))).unwrap().to_string()
+ n => str::from_utf8(&read_buf[0..n]).unwrap().to_string()
};
assert_eq!(read_str.as_slice(), message);
}
//! Readers and Writers for in-memory buffers
use cmp::min;
-use ops::Index;
use option::Option::None;
use result::Result::{Err, Ok};
use io;
let write_len = min(buf.len(), self.buf.len() - self.pos);
{
- let input = self.buf.index(&(self.pos.. (self.pos + write_len)));
+ let input = &self.buf[self.pos.. (self.pos + write_len)];
let output = buf.slice_to_mut(write_len);
assert_eq!(input.len(), output.len());
slice::bytes::copy_memory(output, input);
#[inline]
fn fill_buf<'a>(&'a mut self) -> IoResult<&'a [u8]> {
if self.pos < self.buf.len() {
- Ok(self.buf.index(&(self.pos..)))
+ Ok(&self.buf[self.pos..])
} else {
Err(io::standard_error(io::EndOfFile))
}
let write_len = min(buf.len(), self.len());
{
- let input = self.index(&(0..write_len));
+ let input = &self[0..write_len];
let output = buf.slice_to_mut(write_len);
slice::bytes::copy_memory(output, input);
}
#[inline]
fn consume(&mut self, amt: uint) {
- *self = self.index(&(amt..));
+ *self = &self[amt..];
}
}
Ok(())
} else {
- slice::bytes::copy_memory(dst, src.index(&(0..dst_len)));
+ slice::bytes::copy_memory(dst, &src[0..dst_len]);
self.pos += dst_len;
let write_len = min(buf.len(), self.buf.len() - self.pos);
{
- let input = self.buf.index(&(self.pos.. (self.pos + write_len)));
+ let input = &self.buf[self.pos.. (self.pos + write_len)];
let output = buf.slice_to_mut(write_len);
assert_eq!(input.len(), output.len());
slice::bytes::copy_memory(output, input);
#[inline]
fn fill_buf(&mut self) -> IoResult<&[u8]> {
if self.pos < self.buf.len() {
- Ok(self.buf.index(&(self.pos..)))
+ Ok(&self.buf[self.pos..])
} else {
Err(io::standard_error(io::EndOfFile))
}
extern crate "test" as test_crate;
use io::{SeekSet, SeekCur, SeekEnd, Reader, Writer, Seek};
use prelude::v1::{Ok, Err, range, Vec, Buffer, AsSlice, SliceExt};
- use prelude::v1::{IteratorExt, Index};
+ use prelude::v1::IteratorExt;
use io;
use iter::repeat;
use self::test_crate::Bencher;
assert_eq!(buf, b);
assert_eq!(reader.read(&mut buf), Ok(3));
let b: &[_] = &[5, 6, 7];
- assert_eq!(buf.index(&(0..3)), b);
+ assert_eq!(&buf[0..3], b);
assert!(reader.read(&mut buf).is_err());
let mut reader = MemReader::new(vec!(0, 1, 2, 3, 4, 5, 6, 7));
assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3));
assert_eq!(buf.as_slice(), b);
assert_eq!(reader.read(&mut buf), Ok(3));
let b: &[_] = &[5, 6, 7];
- assert_eq!(buf.index(&(0..3)), b);
+ assert_eq!(&buf[0..3], b);
assert!(reader.read(&mut buf).is_err());
let mut reader = &mut in_buf.as_slice();
assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3));
assert_eq!(buf, b);
assert_eq!(reader.read(&mut buf), Ok(3));
let b: &[_] = &[5, 6, 7];
- assert_eq!(buf.index(&(0..3)), b);
+ assert_eq!(&buf[0..3], b);
assert!(reader.read(&mut buf).is_err());
let mut reader = BufReader::new(in_buf.as_slice());
assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3));
use iter::{Iterator, IteratorExt};
use marker::Sized;
use mem::transmute;
-use ops::{FnOnce, Index};
+use ops::FnOnce;
use option::Option;
use option::Option::{Some, None};
use os;
fn write_char(&mut self, c: char) -> IoResult<()> {
let mut buf = [0u8; 4];
let n = c.encode_utf8(buf.as_mut_slice()).unwrap_or(0);
- self.write(buf.index(&(0..n)))
+ self.write(&buf[0..n])
}
/// Write the result of passing n through `int::to_str_bytes`.
};
match available.iter().position(|&b| b == byte) {
Some(i) => {
- res.push_all(available.index(&(0..(i + 1))));
+ res.push_all(&available[0..(i + 1)]);
used = i + 1;
break
}
}
}
}
- match str::from_utf8(buf.index(&(0..width))).ok() {
+ match str::from_utf8(&buf[0..width]).ok() {
Some(s) => Ok(s.char_at(0)),
None => Err(standard_error(InvalidInput))
}
use io::{self, IoResult, IoError};
use io::net;
use iter::{Iterator, IteratorExt};
-use ops::{FnOnce, FnMut, Index};
+use ops::{FnOnce, FnMut};
use option::Option;
use option::Option::{None, Some};
use result::Result::{Ok, Err};
let mut tail = [0u16; 8];
let (tail_size, _) = read_groups(self, &mut tail, 8 - head_size);
- Some(ipv6_addr_from_head_tail(head.index(&(0..head_size)), tail.index(&(0..tail_size))))
+ Some(ipv6_addr_from_head_tail(&head[0..head_size], &tail[0..tail_size]))
}
fn read_ipv6_addr(&mut self) -> Option<IpAddr> {
impl<R: Buffer> Buffer for LimitReader<R> {
fn fill_buf<'a>(&'a mut self) -> io::IoResult<&'a [u8]> {
let amt = try!(self.inner.fill_buf());
- let buf = amt.index(&(0..cmp::min(amt.len(), self.limit)));
+ let buf = &amt[0..cmp::min(amt.len(), self.limit)];
if buf.len() == 0 {
Err(io::standard_error(io::EndOfFile))
} else {
impl<R: Reader, W: Writer> Reader for TeeReader<R, W> {
fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {
self.reader.read(buf).and_then(|len| {
- self.writer.write(buf.index_mut(&(0..len))).map(|()| len)
+ self.writer.write(&mut buf[0..len]).map(|()| len)
})
}
}
Err(ref e) if e.kind == io::EndOfFile => return Ok(()),
Err(e) => return Err(e),
};
- try!(w.write(buf.index(&(0..len))));
+ try!(w.write(&buf[0..len]));
}
}
use iter::IteratorExt;
use option::Option;
use option::Option::{None, Some};
-use ops::{FullRange, Index};
+use ops::FullRange;
use str;
use str::StrExt;
use string::{String, CowString};
match name.rposition_elem(&dot) {
None | Some(0) => name,
Some(1) if name == b".." => name,
- Some(pos) => name.index(&(0..pos))
+ Some(pos) => &name[0..pos]
}
})
}
match name.rposition_elem(&dot) {
None | Some(0) => None,
Some(1) if name == b".." => None,
- Some(pos) => Some(name.index(&((pos+1)..)))
+ Some(pos) => Some(&name[(pos+1)..])
}
}
}
let extlen = extension.container_as_bytes().len();
match (name.rposition_elem(&dot), extlen) {
(None, 0) | (Some(0), 0) => None,
- (Some(idx), 0) => Some(name.index(&(0..idx)).to_vec()),
+ (Some(idx), 0) => Some(name[0..idx].to_vec()),
(idx, extlen) => {
let idx = match idx {
None | Some(0) => name.len(),
let mut v;
v = Vec::with_capacity(idx + extlen + 1);
- v.push_all(name.index(&(0..idx)));
+ v.push_all(&name[0..idx]);
v.push(dot);
v.push_all(extension.container_as_bytes());
Some(v)
}
#[inline]
fn container_as_str(&self) -> Option<&str> {
- Some(self.index(&FullRange))
+ Some(&self[])
}
#[inline]
fn is_str(_: Option<&String>) -> bool { true }
impl BytesContainer for Vec<u8> {
#[inline]
fn container_as_bytes(&self) -> &[u8] {
- self.index(&FullRange)
+ &self[]
}
}
use io::Writer;
use iter::{AdditiveIterator, Extend};
use iter::{Iterator, IteratorExt, Map};
-use ops::Index;
use marker::Sized;
use option::Option::{self, Some, None};
use slice::{AsSlice, Split, SliceExt, SliceConcatExt};
None => {
self.repr = Path::normalize(filename);
}
- Some(idx) if self.repr.index(&((idx+1)..)) == b".." => {
+ Some(idx) if &self.repr[(idx+1)..] == b".." => {
let mut v = Vec::with_capacity(self.repr.len() + 1 + filename.len());
v.push_all(self.repr.as_slice());
v.push(SEP_BYTE);
}
Some(idx) => {
let mut v = Vec::with_capacity(idx + 1 + filename.len());
- v.push_all(self.repr.index(&(0..(idx+1))));
+ v.push_all(&self.repr[0..(idx+1)]);
v.push_all(filename);
// FIXME: this is slow
self.repr = Path::normalize(v.as_slice());
match self.sepidx {
None if b".." == self.repr => self.repr.as_slice(),
None => dot_static,
- Some(0) => self.repr.index(&(0..1)),
- Some(idx) if self.repr.index(&((idx+1)..)) == b".." => self.repr.as_slice(),
- Some(idx) => self.repr.index(&(0..idx))
+ Some(0) => &self.repr[0..1],
+ Some(idx) if &self.repr[(idx+1)..] == b".." => self.repr.as_slice(),
+ Some(idx) => &self.repr[0..idx]
}
}
None if b"." == self.repr ||
b".." == self.repr => None,
None => Some(self.repr.as_slice()),
- Some(idx) if self.repr.index(&((idx+1)..)) == b".." => None,
- Some(0) if self.repr.index(&(1..)).is_empty() => None,
- Some(idx) => Some(self.repr.index(&((idx+1)..)))
+ Some(idx) if &self.repr[(idx+1)..] == b".." => None,
+ Some(0) if self.repr[1..].is_empty() => None,
+ Some(idx) => Some(&self.repr[(idx+1)..])
}
}
// borrowck is being very picky
let val = {
let is_abs = !v.as_slice().is_empty() && v.as_slice()[0] == SEP_BYTE;
- let v_ = if is_abs { v.as_slice().index(&(1..)) } else { v.as_slice() };
+ let v_ = if is_abs { &v.as_slice()[1..] } else { v.as_slice() };
let comps = normalize_helper(v_, is_abs);
match comps {
None => None,
/// A path of "/" yields no components. A path of "." yields one component.
pub fn components<'a>(&'a self) -> Components<'a> {
let v = if self.repr[0] == SEP_BYTE {
- self.repr.index(&(1..))
+ &self.repr[1..]
} else { self.repr.as_slice() };
let is_sep_byte: fn(&u8) -> bool = is_sep_byte; // coerce to fn ptr
let mut ret = v.split(is_sep_byte);
use iter::{Iterator, IteratorExt, Map, repeat};
use mem;
use option::Option::{self, Some, None};
-use ops::{FullRange, Index};
+use ops::FullRange;
use slice::{SliceExt, SliceConcatExt};
use str::{SplitTerminator, FromStr, StrExt};
use string::{String, ToString};
s.push_str("..");
s.push(SEP);
s.push_str(filename);
- self.update_normalized(s.index(&FullRange));
+ self.update_normalized(&s[]);
}
None => {
self.update_normalized(filename);
}
- Some((_,idxa,end)) if self.repr.index(&(idxa..end)) == ".." => {
+ Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => {
let mut s = String::with_capacity(end + 1 + filename.len());
- s.push_str(self.repr.index(&(0..end)));
+ s.push_str(&self.repr[0..end]);
s.push(SEP);
s.push_str(filename);
- self.update_normalized(s.index(&FullRange));
+ self.update_normalized(&s[]);
}
Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => {
let mut s = String::with_capacity(idxb + filename.len());
- s.push_str(self.repr.index(&(0..idxb)));
+ s.push_str(&self.repr[0..idxb]);
s.push_str(filename);
- self.update_normalized(s.index(&FullRange));
+ self.update_normalized(&s[]);
}
Some((idxb,_,_)) => {
let mut s = String::with_capacity(idxb + 1 + filename.len());
- s.push_str(self.repr.index(&(0..idxb)));
+ s.push_str(&self.repr[0..idxb]);
s.push(SEP);
s.push_str(filename);
- self.update_normalized(s.index(&FullRange));
+ self.update_normalized(&s[]);
}
}
}
let path = path.container_as_str().unwrap();
fn is_vol_abs(path: &str, prefix: Option<PathPrefix>) -> bool {
// assume prefix is Some(DiskPrefix)
- let rest = path.index(&(prefix_len(prefix)..));
+ let rest = &path[prefix_len(prefix)..];
!rest.is_empty() && rest.as_bytes()[0].is_ascii() && is_sep(rest.as_bytes()[0] as char)
}
fn shares_volume(me: &Path, path: &str) -> bool {
// path is assumed to have a prefix of Some(DiskPrefix)
- let repr = me.repr.index(&FullRange);
+ let repr = &me.repr[];
match me.prefix {
Some(DiskPrefix) => {
repr.as_bytes()[0] == path.as_bytes()[0].to_ascii_uppercase()
else { None };
let pathlen = path_.as_ref().map_or(path.len(), |p| p.len());
let mut s = String::with_capacity(me.repr.len() + 1 + pathlen);
- s.push_str(me.repr.index(&FullRange));
+ s.push_str(&me.repr[]);
let plen = me.prefix_len();
// if me is "C:" we don't want to add a path separator
match me.prefix {
}
match path_ {
None => s.push_str(path),
- Some(p) => s.push_str(p.index(&FullRange)),
+ Some(p) => s.push_str(&p[]),
};
- me.update_normalized(s.index(&FullRange))
+ me.update_normalized(&s[])
}
if !path.is_empty() {
match prefix {
Some(DiskPrefix) if !is_vol_abs(path, prefix) && shares_volume(self, path) => {
// cwd-relative path, self is on the same volume
- append_path(self, path.index(&(prefix_len(prefix)..)));
+ append_path(self, &path[prefix_len(prefix)..]);
}
Some(_) => {
// absolute path, or cwd-relative and self is not same volume
/// Always returns a `Some` value.
#[inline]
fn as_str<'a>(&'a self) -> Option<&'a str> {
- Some(self.repr.index(&FullRange))
+ Some(&self.repr[])
}
#[inline]
/// Always returns a `Some` value.
fn dirname_str<'a>(&'a self) -> Option<&'a str> {
Some(match self.sepidx_or_prefix_len() {
- None if ".." == self.repr => self.repr.index(&FullRange),
+ None if ".." == self.repr => &self.repr[],
None => ".",
- Some((_,idxa,end)) if self.repr.index(&(idxa..end)) == ".." => {
- self.repr.index(&FullRange)
+ Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => {
+ &self.repr[]
}
- Some((idxb,_,end)) if self.repr.index(&(idxb..end)) == "\\" => {
- self.repr.index(&FullRange)
+ Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => {
+ &self.repr[]
}
- Some((0,idxa,_)) => self.repr.index(&(0..idxa)),
+ Some((0,idxa,_)) => &self.repr[0..idxa],
Some((idxb,idxa,_)) => {
match self.prefix {
Some(DiskPrefix) | Some(VerbatimDiskPrefix) if idxb == self.prefix_len() => {
- self.repr.index(&(0..idxa))
+ &self.repr[0..idxa]
}
- _ => self.repr.index(&(0..idxb))
+ _ => &self.repr[0..idxb]
}
}
})
/// See `GenericPath::filename_str` for info.
/// Always returns a `Some` value if `filename` returns a `Some` value.
fn filename_str<'a>(&'a self) -> Option<&'a str> {
- let repr = self.repr.index(&FullRange);
+ let repr = &self.repr[];
match self.sepidx_or_prefix_len() {
None if "." == repr || ".." == repr => None,
None => Some(repr),
- Some((_,idxa,end)) if repr.index(&(idxa..end)) == ".." => None,
+ Some((_,idxa,end)) if &repr[idxa..end] == ".." => None,
Some((_,idxa,end)) if idxa == end => None,
- Some((_,idxa,end)) => Some(repr.index(&(idxa..end)))
+ Some((_,idxa,end)) => Some(&repr[idxa..end])
}
}
true
}
Some((idxb,idxa,end)) if idxb == idxa && idxb == end => false,
- Some((idxb,_,end)) if self.repr.index(&(idxb..end)) == "\\" => false,
+ Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => false,
Some((idxb,idxa,_)) => {
let trunc = match self.prefix {
Some(DiskPrefix) | Some(VerbatimDiskPrefix) | None => {
if self.prefix.is_some() {
Some(Path::new(match self.prefix {
Some(DiskPrefix) if self.is_absolute() => {
- self.repr.index(&(0..(self.prefix_len()+1)))
+ &self.repr[0..(self.prefix_len()+1)]
}
Some(VerbatimDiskPrefix) => {
- self.repr.index(&(0..(self.prefix_len()+1)))
+ &self.repr[0..(self.prefix_len()+1)]
}
- _ => self.repr.index(&(0..self.prefix_len()))
+ _ => &self.repr[0..self.prefix_len()]
}))
} else if is_vol_relative(self) {
- Some(Path::new(self.repr.index(&(0..1))))
+ Some(Path::new(&self.repr[0..1]))
} else {
None
}
fn is_absolute(&self) -> bool {
match self.prefix {
Some(DiskPrefix) => {
- let rest = self.repr.index(&(self.prefix_len()..));
+ let rest = &self.repr[self.prefix_len()..];
rest.len() > 0 && rest.as_bytes()[0] == SEP_BYTE
}
Some(_) => true,
/// Does not distinguish between absolute and cwd-relative paths, e.g.
/// C:\foo and C:foo.
pub fn str_components<'a>(&'a self) -> StrComponents<'a> {
- let repr = self.repr.index(&FullRange);
+ let repr = &self.repr[];
let s = match self.prefix {
Some(_) => {
let plen = self.prefix_len();
if repr.len() > plen && repr.as_bytes()[plen] == SEP_BYTE {
- repr.index(&((plen+1)..))
- } else { repr.index(&(plen..)) }
+ &repr[(plen+1)..]
+ } else { &repr[plen..] }
}
- None if repr.as_bytes()[0] == SEP_BYTE => repr.index(&(1..)),
+ None if repr.as_bytes()[0] == SEP_BYTE => &repr[1..],
None => repr
};
let some: fn(&'a str) -> Option<&'a str> = Some; // coerce to fn ptr
}
fn equiv_prefix(&self, other: &Path) -> bool {
- let s_repr = self.repr.index(&FullRange);
- let o_repr = other.repr.index(&FullRange);
+ let s_repr = &self.repr[];
+ let o_repr = &other.repr[];
match (self.prefix, other.prefix) {
(Some(DiskPrefix), Some(VerbatimDiskPrefix)) => {
self.is_absolute() &&
o_repr.as_bytes()[4].to_ascii_lowercase()
}
(Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => {
- s_repr.index(&(2..self.prefix_len())) == o_repr.index(&(8..other.prefix_len()))
+ &s_repr[2..self.prefix_len()] == &o_repr[8..other.prefix_len()]
}
(Some(VerbatimUNCPrefix(_,_)), Some(UNCPrefix(_,_))) => {
- s_repr.index(&(8..self.prefix_len())) == o_repr.index(&(2..other.prefix_len()))
+ &s_repr[8..self.prefix_len()] == &o_repr[2..other.prefix_len()]
}
(None, None) => true,
(a, b) if a == b => {
- s_repr.index(&(0..self.prefix_len())) == o_repr.index(&(0..other.prefix_len()))
+ &s_repr[0..self.prefix_len()] == &o_repr[0..other.prefix_len()]
}
_ => false
}
match prefix.unwrap() {
DiskPrefix => {
let len = prefix_len(prefix) + is_abs as uint;
- let mut s = String::from_str(s.index(&(0..len)));
+ let mut s = String::from_str(&s[0..len]);
unsafe {
let v = s.as_mut_vec();
v[0] = (*v)[0].to_ascii_uppercase();
}
VerbatimDiskPrefix => {
let len = prefix_len(prefix) + is_abs as uint;
- let mut s = String::from_str(s.index(&(0..len)));
+ let mut s = String::from_str(&s[0..len]);
unsafe {
let v = s.as_mut_vec();
v[4] = (*v)[4].to_ascii_uppercase();
_ => {
let plen = prefix_len(prefix);
if s.len() > plen {
- Some(String::from_str(s.index(&(0..plen))))
+ Some(String::from_str(&s[0..plen]))
} else { None }
}
}
} else if is_abs && comps.is_empty() {
Some(repeat(SEP).take(1).collect())
} else {
- let prefix_ = s.index(&(0..prefix_len(prefix)));
+ let prefix_ = &s[0..prefix_len(prefix)];
let n = prefix_.len() +
if is_abs { comps.len() } else { comps.len() - 1} +
comps.iter().map(|v| v.len()).sum();
s.push(':');
}
Some(VerbatimDiskPrefix) => {
- s.push_str(prefix_.index(&(0..4)));
+ s.push_str(&prefix_[0..4]);
s.push(prefix_.as_bytes()[4].to_ascii_uppercase() as char);
- s.push_str(prefix_.index(&(5..)));
+ s.push_str(&prefix_[5..]);
}
Some(UNCPrefix(a,b)) => {
s.push_str("\\\\");
- s.push_str(prefix_.index(&(2..(a+2))));
+ s.push_str(&prefix_[2..(a+2)]);
s.push(SEP);
- s.push_str(prefix_.index(&((3+a)..(3+a+b))));
+ s.push_str(&prefix_[(3+a)..(3+a+b)]);
}
Some(_) => s.push_str(prefix_),
None => ()
fn update_sepidx(&mut self) {
let s = if self.has_nonsemantic_trailing_slash() {
- self.repr.index(&(0..(self.repr.len()-1)))
- } else { self.repr.index(&FullRange) };
+ &self.repr[0..(self.repr.len()-1)]
+ } else { &self.repr[] };
let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) {
is_sep
} else {
/// non-verbatim, the non-verbatim version is returned.
/// Otherwise, None is returned.
pub fn make_non_verbatim(path: &Path) -> Option<Path> {
- let repr = path.repr.index(&FullRange);
+ let repr = &path.repr[];
let new_path = match path.prefix {
Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None,
Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()),
Some(VerbatimDiskPrefix) => {
// \\?\D:\
- Path::new(repr.index(&(4..)))
+ Path::new(&repr[4..])
}
Some(VerbatimUNCPrefix(_,_)) => {
// \\?\UNC\server\share
- Path::new(format!(r"\{}", repr.index(&(7..))))
+ Path::new(format!(r"\{}", &repr[7..]))
}
};
if new_path.prefix.is_none() {
return None;
}
// now ensure normalization didn't change anything
- if repr.index(&(path.prefix_len()..)) ==
- new_path.repr.index(&(new_path.prefix_len()..)) {
+ if &repr[path.prefix_len()..] == &new_path.repr[new_path.prefix_len()..] {
Some(new_path)
} else {
None
fn parse_prefix<'a>(mut path: &'a str) -> Option<PathPrefix> {
if path.starts_with("\\\\") {
// \\
- path = path.index(&(2..));
+ path = &path[2..];
if path.starts_with("?\\") {
// \\?\
- path = path.index(&(2..));
+ path = &path[2..];
if path.starts_with("UNC\\") {
// \\?\UNC\server\share
- path = path.index(&(4..));
+ path = &path[4..];
let (idx_a, idx_b) = match parse_two_comps(path, is_sep_verbatim) {
Some(x) => x,
None => (path.len(), 0)
}
} else if path.starts_with(".\\") {
// \\.\path
- path = path.index(&(2..));
+ path = &path[2..];
let idx = path.find('\\').unwrap_or(path.len());
return Some(DeviceNSPrefix(idx));
}
None => return None,
Some(x) => x
};
- path = path.index(&((idx_a+1)..));
+ path = &path[(idx_a+1)..];
let idx_b = path.find(f).unwrap_or(path.len());
Some((idx_a, idx_b))
}
is_sep_verbatim
};
let is_abs = s.len() > prefix_len(prefix) && f(s.char_at(prefix_len(prefix)));
- let s_ = s.index(&(prefix_len(prefix)..));
- let s_ = if is_abs { s_.index(&(1..)) } else { s_ };
+ let s_ = &s[prefix_len(prefix)..];
+ let s_ = if is_abs { &s_[1..] } else { s_ };
if is_abs && s_.is_empty() {
return (is_abs, match prefix {
#[stable] #[doc(no_inline)] pub use ops::{Drop, Fn, FnMut, FnOnce};
// TEMPORARY
-#[unstable] #[doc(no_inline)] pub use ops::{Index, IndexMut, FullRange};
+#[unstable] #[doc(no_inline)] pub use ops::FullRange;
// Reexported functions
#[stable] #[doc(no_inline)] pub use mem::drop;
// MAX_CALLBACKS, so we're sure to clamp it as necessary.
let callbacks = {
let amt = CALLBACK_CNT.load(Ordering::SeqCst);
- CALLBACKS.index(&(0..cmp::min(amt, MAX_CALLBACKS)))
+ &CALLBACKS[0..cmp::min(amt, MAX_CALLBACKS)]
};
for cb in callbacks.iter() {
match cb.load(Ordering::SeqCst) {
impl<'a> fmt::Writer for BufWriter<'a> {
fn write_str(&mut self, bytes: &str) -> fmt::Result {
let left = self.buf.slice_from_mut(self.pos);
- let to_write = bytes.as_bytes().index(&(0..cmp::min(bytes.len(), left.len())));
+ let to_write = &bytes.as_bytes()[0..cmp::min(bytes.len(), left.len())];
slice::bytes::copy_memory(left, to_write);
self.pos += to_write.len();
Ok(())
let mut msg = [0u8; 512];
let mut w = BufWriter { buf: &mut msg, pos: 0 };
let _ = write!(&mut w, "{}", args);
- let msg = str::from_utf8(w.buf.index_mut(&(0..w.pos))).unwrap_or("aborted");
+ let msg = str::from_utf8(&w.buf[0..w.pos]).unwrap_or("aborted");
let msg = if msg.is_empty() {"aborted"} else {msg};
// Give some context to the message
// Also as with read(), we use MSG_DONTWAIT to guard ourselves
// against unforeseen circumstances.
let _guard = lock();
- let ptr = buf.index(&(written..)).as_ptr();
+ let ptr = buf[written..].as_ptr();
let len = buf.len() - written;
match retry(|| write(deadline.is_some(), ptr, len)) {
-1 if wouldblock() => {}
let bytes = unsafe { ffi::c_str_to_bytes(&ptr) };
match str::from_utf8(bytes) {
Ok(s) => try!(demangle(w, s)),
- Err(..) => try!(w.write(bytes.index(&(..(bytes.len()-1))))),
+ Err(..) => try!(w.write(&bytes[..(bytes.len()-1)])),
}
}
try!(w.write(&['\n' as u8]));
pub fn truncate_utf16_at_nul<'a>(v: &'a [u16]) -> &'a [u16] {
match v.iter().position(|c| *c == 0) {
// don't include the 0
- Some(i) => v.index(&(0..i)),
+ Some(i) => &v[0..i],
None => v
}
}
}
let ret = unsafe {
libc::WriteFile(self.handle(),
- buf.index(&(offset..)).as_ptr() as libc::LPVOID,
+ buf[offset..].as_ptr() as libc::LPVOID,
(buf.len() - offset) as libc::DWORD,
&mut bytes_written,
&mut overlapped)
impl Decodable for Ident {
fn decode<D: Decoder>(d: &mut D) -> Result<Ident, D::Error> {
- Ok(str_to_ident(try!(d.read_str()).index(&FullRange)))
+ Ok(str_to_ident(&try!(d.read_str())[]))
}
}
if !s.is_empty() {
s.push_str("::");
}
- s.push_str(e.index(&FullRange));
+ s.push_str(&e[]);
s
}).to_string()
}
F: FnOnce(Option<&[Attribute]>) -> T,
{
let attrs = match self.get(id) {
- NodeItem(i) => Some(i.attrs.index(&FullRange)),
- NodeForeignItem(fi) => Some(fi.attrs.index(&FullRange)),
+ NodeItem(i) => Some(&i.attrs[]),
+ NodeForeignItem(fi) => Some(&fi.attrs[]),
NodeTraitItem(ref tm) => match **tm {
- RequiredMethod(ref type_m) => Some(type_m.attrs.index(&FullRange)),
- ProvidedMethod(ref m) => Some(m.attrs.index(&FullRange)),
- TypeTraitItem(ref typ) => Some(typ.attrs.index(&FullRange)),
+ RequiredMethod(ref type_m) => Some(&type_m.attrs[]),
+ ProvidedMethod(ref m) => Some(&m.attrs[]),
+ TypeTraitItem(ref typ) => Some(&typ.attrs[]),
},
NodeImplItem(ref ii) => {
match **ii {
- MethodImplItem(ref m) => Some(m.attrs.index(&FullRange)),
- TypeImplItem(ref t) => Some(t.attrs.index(&FullRange)),
+ MethodImplItem(ref m) => Some(&m.attrs[]),
+ TypeImplItem(ref t) => Some(&t.attrs[]),
}
}
- NodeVariant(ref v) => Some(v.node.attrs.index(&FullRange)),
+ NodeVariant(ref v) => Some(&v.node.attrs[]),
// unit/tuple structs take the attributes straight from
// the struct definition.
// FIXME(eddyb) make this work again (requires access to the map).
NodesMatchingSuffix {
map: self,
item_name: parts.last().unwrap(),
- in_which: parts.index(&(0..(parts.len() - 1))),
+ in_which: &parts[0..(parts.len() - 1)],
idx: 0,
}
}
None => return false,
Some((node_id, name)) => (node_id, name),
};
- if part.index(&FullRange) != mod_name.as_str() {
+ if &part[] != mod_name.as_str() {
return false;
}
cursor = self.map.get_parent(mod_id);
// We are looking at some node `n` with a given name and parent
// id; do their names match what I am seeking?
fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool {
- name.as_str() == self.item_name.index(&FullRange) &&
+ name.as_str() == &self.item_name[] &&
self.suffix_matches(parent_of_n)
}
}
fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
let id_str = format!(" (id={})", id);
- let id_str = if include_id { id_str.index(&FullRange) } else { "" };
+ let id_str = if include_id { &id_str[] } else { "" };
match map.find(id) {
Some(NodeItem(item)) => {
match *trait_ref {
Some(ref trait_ref) => {
pretty.push('.');
- pretty.push_str(pprust::path_to_string(&trait_ref.path).index(&FullRange));
+ pretty.push_str(&pprust::path_to_string(&trait_ref.path)[]);
}
None => {}
}
- token::gensym_ident(pretty.index(&FullRange))
+ token::gensym_ident(&pretty[])
}
pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod {
pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool {
(a.span == b.span)
&& (a.global == b.global)
- && (segments_name_eq(a.segments.index(&FullRange), b.segments.index(&FullRange)))
+ && (segments_name_eq(&a.segments[], &b.segments[]))
}
// are two arrays of segments equal when compared unhygienically?
#[test] fn idents_name_eq_test() {
assert!(segments_name_eq(
- [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
- .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().index(&FullRange),
- [Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}]
- .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().index(&FullRange)));
+ &[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
+ .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[],
+ &[Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}]
+ .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[]));
assert!(!segments_name_eq(
- [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
- .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().index(&FullRange),
- [Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}]
- .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().index(&FullRange)));
+ &[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
+ .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[],
+ &[Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}]
+ .iter().map(ident_to_segment).collect::<Vec<PathSegment>>()[]));
}
}
fn meta_item_list<'a>(&'a self) -> Option<&'a [P<MetaItem>]> {
match self.node {
- MetaList(_, ref l) => Some(l.index(&FullRange)),
+ MetaList(_, ref l) => Some(&l[]),
_ => None
}
}
let comment = self.value_str().unwrap();
let meta = mk_name_value_item_str(
InternedString::new("doc"),
- token::intern_and_get_ident(strip_doc_comment_decoration(
- comment.get()).index(&FullRange)));
+ token::intern_and_get_ident(&strip_doc_comment_decoration(
+ comment.get())[]));
if self.node.style == ast::AttrOuter {
f(&mk_attr_outer(self.node.id, meta))
} else {
}
MetaList(ref n, ref items) if *n == "inline" => {
mark_used(attr);
- if contains_name(items.index(&FullRange), "always") {
+ if contains_name(&items[], "always") {
InlineAlways
- } else if contains_name(items.index(&FullRange), "never") {
+ } else if contains_name(&items[], "never") {
InlineNever
} else {
InlineHint
if !set.insert(name.clone()) {
diagnostic.span_fatal(meta.span,
- format!("duplicate meta item `{}`", name).index(&FullRange));
+ &format!("duplicate meta item `{}`", name)[]);
}
}
}
lines.get(line_number).map(|&line| {
let begin: BytePos = line - self.start_pos;
let begin = begin.to_uint();
- let slice = self.src.index(&(begin..));
+ let slice = &self.src[begin..];
match slice.find('\n') {
- Some(e) => slice.index(&(0..e)),
+ Some(e) => &slice[0..e],
None => slice
}.to_string()
})
// FIXME #12884: no efficient/safe way to remove from the start of a string
// and reuse the allocation.
let mut src = if src.starts_with("\u{feff}") {
- String::from_str(src.index(&(3..)))
+ String::from_str(&src[3..])
} else {
- String::from_str(src.index(&FullRange))
+ String::from_str(&src[])
};
// Append '\n' in case it's not already there.
if begin.fm.start_pos != end.fm.start_pos {
None
} else {
- Some(begin.fm.src.index(&(begin.pos.to_uint()..
- end.pos.to_uint())).to_string())
+ Some((&begin.fm.src[begin.pos.to_uint()..end.pos.to_uint()]).to_string())
}
}
panic!(ExplicitBug);
}
pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
- self.span_bug(sp, format!("unimplemented {}", msg).index(&FullRange));
+ self.span_bug(sp, &format!("unimplemented {}", msg)[]);
}
pub fn handler<'a>(&'a self) -> &'a Handler {
&self.handler
self.err_count.get());
}
}
- self.fatal(s.index(&FullRange));
+ self.fatal(&s[]);
}
pub fn warn(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Warning);
panic!(ExplicitBug);
}
pub fn unimpl(&self, msg: &str) -> ! {
- self.bug(format!("unimplemented {}", msg).index(&FullRange));
+ self.bug(&format!("unimplemented {}", msg)[]);
}
pub fn emit(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
// to be miscolored. We assume this is rare enough that we don't
// have to worry about it.
if msg.ends_with("\n") {
- try!(t.write_str(msg.index(&(0..(msg.len()-1)))));
+ try!(t.write_str(&msg[0..(msg.len()-1)]));
try!(t.reset());
try!(t.write_str("\n"));
} else {
}
try!(print_maybe_styled(dst,
- format!("{}: ", lvl.to_string()).index(&FullRange),
+ &format!("{}: ", lvl.to_string())[],
term::attr::ForegroundColor(lvl.color())));
try!(print_maybe_styled(dst,
- format!("{}", msg).index(&FullRange),
+ &format!("{}", msg)[],
term::attr::Bold));
match code {
Some(code) => {
let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
- try!(print_maybe_styled(dst, format!(" [{}]", code.clone()).index(&FullRange), style));
+ try!(print_maybe_styled(dst, &format!(" [{}]", code.clone())[], style));
}
None => ()
}
// the span)
let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
let ses = cm.span_to_string(span_end);
- try!(print_diagnostic(dst, ses.index(&FullRange), lvl, msg, code));
+ try!(print_diagnostic(dst, &ses[], lvl, msg, code));
if rsp.is_full_span() {
try!(custom_highlight_lines(dst, cm, sp, lvl, lines));
}
} else {
- try!(print_diagnostic(dst, ss.index(&FullRange), lvl, msg, code));
+ try!(print_diagnostic(dst, &ss[], lvl, msg, code));
if rsp.is_full_span() {
try!(highlight_lines(dst, cm, sp, lvl, lines));
}
Some(code) =>
match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
Some(_) => {
- try!(print_diagnostic(dst, ss.index(&FullRange), Help,
- format!("pass `--explain {}` to see a detailed \
- explanation", code).index(&FullRange), None));
+ try!(print_diagnostic(dst, &ss[], Help,
+ &format!("pass `--explain {}` to see a detailed \
+ explanation", code)[], None));
}
None => ()
},
let fm = &*lines.file;
let mut elided = false;
- let mut display_lines = lines.lines.index(&FullRange);
+ let mut display_lines = &lines.lines[];
if display_lines.len() > MAX_LINES {
- display_lines = display_lines.index(&(0u..MAX_LINES));
+ display_lines = &display_lines[0u..MAX_LINES];
elided = true;
}
// Print the offending lines
}
}
try!(print_maybe_styled(err,
- format!("{}\n", s).index(&FullRange),
+ &format!("{}\n", s)[],
term::attr::ForegroundColor(lvl.color())));
}
Ok(())
-> io::IoResult<()> {
let fm = &*lines.file;
- let lines = lines.lines.index(&FullRange);
+ let lines = &lines.lines[];
if lines.len() > MAX_LINES {
if let Some(line) = fm.get_line(lines[0]) {
try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
s.push('^');
s.push('\n');
print_maybe_styled(w,
- s.index(&FullRange),
+ &s[],
term::attr::ForegroundColor(lvl.color()))
}
codemap::MacroAttribute => ("#[", "]"),
codemap::MacroBang => ("", "!")
};
- try!(print_diagnostic(w, ss.index(&FullRange), Note,
- format!("in expansion of {}{}{}", pre,
+ try!(print_diagnostic(w, &ss[], Note,
+ &format!("in expansion of {}{}{}", pre,
ei.callee.name,
- post).index(&FullRange), None));
+ post)[], None));
let ss = cm.span_to_string(ei.call_site);
- try!(print_diagnostic(w, ss.index(&FullRange), Note, "expansion site", None));
+ try!(print_diagnostic(w, &ss[], Note, "expansion site", None));
Ok(Some(ei.call_site))
}
None => Ok(None)
{
match opt {
Some(t) => t,
- None => diag.handler().bug(msg().index(&FullRange)),
+ None => diag.handler().bug(&msg()[]),
}
}
with_used_diagnostics(|diagnostics| {
match diagnostics.insert(code.name, span) {
Some(previous_span) => {
- ecx.span_warn(span, format!(
+ ecx.span_warn(span, &format!(
"diagnostic code {} already used", token::get_ident(code).get()
- ).index(&FullRange));
+ )[]);
ecx.span_note(previous_span, "previous invocation");
},
None => ()
};
with_registered_diagnostics(|diagnostics| {
if diagnostics.insert(code.name, description).is_some() {
- ecx.span_err(span, format!(
+ ecx.span_err(span, &format!(
"diagnostic code {} already registered", token::get_ident(*code).get()
- ).index(&FullRange));
+ )[]);
}
});
- let sym = Ident::new(token::gensym((
+ let sym = Ident::new(token::gensym(&(
"__register_diagnostic_".to_string() + token::get_ident(*code).get()
- ).index(&FullRange)));
+ )[]));
MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter())
}
let output = match constraint.get().slice_shift_char() {
Some(('=', _)) => None,
Some(('+', operand)) => {
- Some(token::intern_and_get_ident(format!(
- "={}", operand).index(&FullRange)))
+ Some(token::intern_and_get_ident(&format!(
+ "={}", operand)[]))
}
_ => {
cx.span_err(span, "output operand constraint lacks '=' or '+'");
pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); }
pub fn mod_path(&self) -> Vec<ast::Ident> {
let mut v = Vec::new();
- v.push(token::str_to_ident(self.ecfg.crate_name.index(&FullRange)));
+ v.push(token::str_to_ident(&self.ecfg.crate_name[]));
v.extend(self.mod_path.iter().map(|a| *a));
return v;
}
self.recursion_count += 1;
if self.recursion_count > self.ecfg.recursion_limit {
self.span_fatal(ei.call_site,
- format!("recursion limit reached while expanding the macro `{}`",
- ei.callee.name).index(&FullRange));
+ &format!("recursion limit reached while expanding the macro `{}`",
+ ei.callee.name)[]);
}
let mut call_site = ei.call_site;
tts: &[ast::TokenTree],
name: &str) {
if tts.len() != 0 {
- cx.span_err(sp, format!("{} takes no arguments", name).index(&FullRange));
+ cx.span_err(sp, &format!("{} takes no arguments", name)[]);
}
}
-> Option<String> {
let mut p = cx.new_parser_from_tts(tts);
if p.token == token::Eof {
- cx.span_err(sp, format!("{} takes 1 argument", name).index(&FullRange));
+ cx.span_err(sp, &format!("{} takes 1 argument", name)[]);
return None
}
let ret = cx.expander().fold_expr(p.parse_expr());
if p.token != token::Eof {
- cx.span_err(sp, format!("{} takes 1 argument", name).index(&FullRange));
+ cx.span_err(sp, &format!("{} takes 1 argument", name)[]);
}
expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
s.get().to_string()
fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr> {
let loc = self.codemap().lookup_char_pos(span.lo);
let expr_file = self.expr_str(span,
- token::intern_and_get_ident(loc.file
- .name.index(&FullRange)));
+ token::intern_and_get_ident(&loc.file.name[]));
let expr_line = self.expr_uint(span, loc.line);
let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line));
let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
ast::LitInt(i, ast::UnsignedIntLit(_)) |
ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) |
ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => {
- accumulator.push_str(format!("{}", i).index(&FullRange));
+ accumulator.push_str(&format!("{}", i)[]);
}
ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) |
ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => {
- accumulator.push_str(format!("-{}", i).index(&FullRange));
+ accumulator.push_str(&format!("-{}", i)[]);
}
ast::LitBool(b) => {
- accumulator.push_str(format!("{}", b).index(&FullRange));
+ accumulator.push_str(&format!("{}", b)[]);
}
ast::LitByte(..) |
ast::LitBinary(..) => {
}
base::MacExpr::new(cx.expr_str(
sp,
- token::intern_and_get_ident(accumulator.index(&FullRange))))
+ token::intern_and_get_ident(&accumulator[])))
}
}
}
}
- let res = str_to_ident(res_str.index(&FullRange));
+ let res = str_to_ident(&res_str[]);
let e = P(ast::Expr {
id: ast::DUMMY_NODE_ID,
},
EnumNonMatchingCollapsed (..) => {
cx.span_bug(trait_span,
- format!("non-matching enum variants in \
- `deriving({})`", name).index(&FullRange))
+ &format!("non-matching enum variants in \
+ `deriving({})`", name)[])
}
StaticEnum(..) | StaticStruct(..) => {
cx.span_bug(trait_span,
- format!("static method in `deriving({})`", name).index(&FullRange))
+ &format!("static method in `deriving({})`", name)[])
}
}
Some(i) => i,
None => {
cx.span_bug(trait_span,
- format!("unnamed field in normal struct in \
- `deriving({})`", name).index(&FullRange))
+ &format!("unnamed field in normal struct in \
+ `deriving({})`", name)[])
}
};
cx.field_imm(field.span, ident, subcall(field))
} else {
let fields = fields.iter().enumerate().map(|(i, &span)| {
getarg(cx, span,
- token::intern_and_get_ident(format!("_field{}",
- i).index(&FullRange)),
+ token::intern_and_get_ident(&format!("_field{}", i)[]),
i)
}).collect();
let name = match name {
Some(id) => token::get_ident(id),
None => {
- token::intern_and_get_ident(format!("_field{}", i).index(&FullRange))
+ token::intern_and_get_ident(&format!("_field{}", i)[])
}
};
let enc = cx.expr_method_call(span, self_.clone(),
self,
struct_def,
type_ident,
- self_args.index(&FullRange),
- nonself_args.index(&FullRange))
+ &self_args[],
+ &nonself_args[])
} else {
method_def.expand_struct_method_body(cx,
self,
struct_def,
type_ident,
- self_args.index(&FullRange),
- nonself_args.index(&FullRange))
+ &self_args[],
+ &nonself_args[])
};
method_def.create_method(cx,
self,
enum_def,
type_ident,
- self_args.index(&FullRange),
- nonself_args.index(&FullRange))
+ &self_args[],
+ &nonself_args[])
} else {
method_def.expand_enum_method_body(cx,
self,
enum_def,
type_ident,
self_args,
- nonself_args.index(&FullRange))
+ &nonself_args[])
};
method_def.create_method(cx,
for (i, ty) in self.args.iter().enumerate() {
let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics);
- let ident = cx.ident_of(format!("__arg_{}", i).index(&FullRange));
+ let ident = cx.ident_of(&format!("__arg_{}", i)[]);
arg_tys.push((ident, ast_ty));
let arg_expr = cx.expr_ident(trait_.span, ident);
trait_.create_struct_pattern(cx,
struct_path,
struct_def,
- format!("__self_{}",
- i).index(&FullRange),
+ &format!("__self_{}",
+ i)[],
ast::MutImmutable);
patterns.push(pat);
raw_fields.push(ident_expr);
.collect::<Vec<String>>();
let self_arg_idents = self_arg_names.iter()
- .map(|name|cx.ident_of(name.index(&FullRange)))
+ .map(|name|cx.ident_of(&name[]))
.collect::<Vec<ast::Ident>>();
// The `vi_idents` will be bound, solely in the catch-all, to
// a series of let statements mapping each self_arg to a uint
// corresponding to its variant index.
let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
- .map(|name| { let vi_suffix = format!("{}_vi", name.index(&FullRange));
- cx.ident_of(vi_suffix.index(&FullRange)) })
+ .map(|name| { let vi_suffix = format!("{}_vi", &name[]);
+ cx.ident_of(&vi_suffix[]) })
.collect::<Vec<ast::Ident>>();
// Builds, via callback to call_substructure_method, the
// delegated expression that handles the catch-all case,
// using `__variants_tuple` to drive logic if necessary.
let catch_all_substructure = EnumNonMatchingCollapsed(
- self_arg_idents, variants.index(&FullRange), vi_idents.index(&FullRange));
+ self_arg_idents, &variants[], &vi_idents[]);
// These arms are of the form:
// (Variant1, Variant1, ...) => Body1
let mut subpats = Vec::with_capacity(self_arg_names.len());
let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1);
let first_self_pat_idents = {
- let (p, idents) = mk_self_pat(cx, self_arg_names[0].index(&FullRange));
+ let (p, idents) = mk_self_pat(cx, &self_arg_names[0][]);
subpats.push(p);
idents
};
for self_arg_name in self_arg_names.tail().iter() {
- let (p, idents) = mk_self_pat(cx, self_arg_name.index(&FullRange));
+ let (p, idents) = mk_self_pat(cx, &self_arg_name[]);
subpats.push(p);
self_pats_idents.push(idents);
}
&**variant,
field_tuples);
let arm_expr = self.call_substructure_method(
- cx, trait_, type_ident, self_args.index(&FullRange), nonself_args,
+ cx, trait_, type_ident, &self_args[], nonself_args,
&substructure);
cx.arm(sp, vec![single_pat], arm_expr)
}
let arm_expr = self.call_substructure_method(
- cx, trait_, type_ident, self_args.index(&FullRange), nonself_args,
+ cx, trait_, type_ident, &self_args[], nonself_args,
&catch_all_substructure);
// Builds the expression:
cx.span_bug(sp, "a struct with named and unnamed fields in `derive`");
}
};
- let ident = cx.ident_of(format!("{}_{}", prefix, i).index(&FullRange));
+ let ident = cx.ident_of(&format!("{}_{}", prefix, i)[]);
paths.push(codemap::Spanned{span: sp, node: ident});
let val = cx.expr(
sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident)))));
let mut ident_expr = Vec::new();
for (i, va) in variant_args.iter().enumerate() {
let sp = self.set_expn_info(cx, va.ty.span);
- let ident = cx.ident_of(format!("{}_{}", prefix, i).index(&FullRange));
+ let ident = cx.ident_of(&format!("{}_{}", prefix, i)[]);
let path1 = codemap::Spanned{span: sp, node: ident};
paths.push(path1);
let expr_path = cx.expr_path(cx.path_ident(sp, ident));
field.span,
old,
field.self_.clone(),
- field.other.index(&FullRange))
+ &field.other[])
})
} else {
all_fields.iter().rev().fold(base, |old, field| {
field.span,
old,
field.self_.clone(),
- field.other.index(&FullRange))
+ &field.other[])
})
}
},
EnumNonMatchingCollapsed(ref all_args, _, tuple) =>
- enum_nonmatch_f(cx, trait_span, (all_args.index(&FullRange), tuple),
+ enum_nonmatch_f(cx, trait_span, (&all_args[], tuple),
substructure.nonself_args),
StaticEnum(..) | StaticStruct(..) => {
cx.span_bug(trait_span, "static function in `derive`")
f(cx, trait_span, called)
},
EnumNonMatchingCollapsed(ref all_self_args, _, tuple) =>
- enum_nonmatch_f(cx, trait_span, (all_self_args.index(&FullRange), tuple),
+ enum_nonmatch_f(cx, trait_span, (&all_self_args[], tuple),
substructure.nonself_args),
StaticEnum(..) | StaticStruct(..) => {
cx.span_bug(trait_span, "static function in `derive`")
ref tname => {
cx.span_err(titem.span,
- format!("unknown `derive` \
+ &format!("unknown `derive` \
trait: `{}`",
- *tname).index(&FullRange));
+ *tname)[]);
}
};
}
let formatter = substr.nonself_args[0].clone();
let meth = cx.ident_of("write_fmt");
- let s = token::intern_and_get_ident(format_string.index(&FullRange));
+ let s = token::intern_and_get_ident(&format_string[]);
let format_string = cx.expr_str(span, s);
// phew, not our responsibility any more!
Some(v) => v
};
- let e = match os::getenv(var.index(&FullRange)) {
+ let e = match os::getenv(&var[]) {
None => {
cx.expr_path(cx.path_all(sp,
true,
cx.ident_of("Some")),
vec!(cx.expr_str(sp,
token::intern_and_get_ident(
- s.index(&FullRange)))))
+ &s[]))))
}
};
MacExpr::new(e)
};
let msg = match exprs.next() {
None => {
- token::intern_and_get_ident(format!("environment variable `{}` \
+ token::intern_and_get_ident(&format!("environment variable `{}` \
not defined",
- var).index(&FullRange))
+ var)[])
}
Some(second) => {
match expr_to_string(cx, second, "expected string literal") {
cx.span_err(sp, msg.get());
cx.expr_uint(sp, 0)
}
- Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s.index(&FullRange)))
+ Some(s) => cx.expr_str(sp, token::intern_and_get_ident(&s[]))
};
MacExpr::new(e)
}
None => {
fld.cx.span_err(
pth.span,
- format!("macro undefined: '{}!'",
- extnamestr.get()).index(&FullRange));
+ &format!("macro undefined: '{}!'",
+ extnamestr.get())[]);
// let compilation continue
None
},
});
let fm = fresh_mark();
- let marked_before = mark_tts(tts.index(&FullRange), fm);
+ let marked_before = mark_tts(&tts[], fm);
// The span that we pass to the expanders we want to
// be the root of the call stack. That's the most
let opt_parsed = {
let expanded = expandfun.expand(fld.cx,
mac_span,
- marked_before.index(&FullRange));
+ &marked_before[]);
parse_thunk(expanded)
};
let parsed = match opt_parsed {
None => {
fld.cx.span_err(
pth.span,
- format!("non-expression macro in expression position: {}",
- extnamestr.get().index(&FullRange)
- ).index(&FullRange));
+ &format!("non-expression macro in expression position: {}",
+ &extnamestr.get()[]
+ )[]);
return None;
}
};
_ => {
fld.cx.span_err(
pth.span,
- format!("'{}' is not a tt-style macro",
- extnamestr.get()).index(&FullRange));
+ &format!("'{}' is not a tt-style macro",
+ extnamestr.get())[]);
None
}
}
if valid_ident {
fld.cx.mod_push(it.ident);
}
- let macro_use = contains_macro_use(fld, new_attrs.index(&FullRange));
+ let macro_use = contains_macro_use(fld, &new_attrs[]);
let result = with_exts_frame!(fld.cx.syntax_env,
macro_use,
noop_fold_item(it, fld));
let expanded = match fld.cx.syntax_env.find(&extname.name) {
None => {
fld.cx.span_err(path_span,
- format!("macro undefined: '{}!'",
- extnamestr).index(&FullRange));
+ &format!("macro undefined: '{}!'",
+ extnamestr)[]);
// let compilation continue
return SmallVector::zero();
}
if it.ident.name != parse::token::special_idents::invalid.name {
fld.cx
.span_err(path_span,
- format!("macro {}! expects no ident argument, \
+ &format!("macro {}! expects no ident argument, \
given '{}'",
extnamestr,
- token::get_ident(it.ident)).index(&FullRange));
+ token::get_ident(it.ident))[]);
return SmallVector::zero();
}
fld.cx.bt_push(ExpnInfo {
}
});
// mark before expansion:
- let marked_before = mark_tts(tts.index(&FullRange), fm);
- expander.expand(fld.cx, it.span, marked_before.index(&FullRange))
+ let marked_before = mark_tts(&tts[], fm);
+ expander.expand(fld.cx, it.span, &marked_before[])
}
IdentTT(ref expander, span) => {
if it.ident.name == parse::token::special_idents::invalid.name {
fld.cx.span_err(path_span,
- format!("macro {}! expects an ident argument",
- extnamestr.get()).index(&FullRange));
+ &format!("macro {}! expects an ident argument",
+ extnamestr.get())[]);
return SmallVector::zero();
}
fld.cx.bt_push(ExpnInfo {
}
});
// mark before expansion:
- let marked_tts = mark_tts(tts.index(&FullRange), fm);
+ let marked_tts = mark_tts(&tts[], fm);
expander.expand(fld.cx, it.span, it.ident, marked_tts)
}
MacroRulesTT => {
if it.ident.name == parse::token::special_idents::invalid.name {
fld.cx.span_err(path_span,
- format!("macro_rules! expects an ident argument")
- .index(&FullRange));
+ &format!("macro_rules! expects an ident argument")
+ []);
return SmallVector::zero();
}
fld.cx.bt_push(ExpnInfo {
}
_ => {
fld.cx.span_err(it.span,
- format!("{}! is not legal in item position",
- extnamestr.get()).index(&FullRange));
+ &format!("{}! is not legal in item position",
+ extnamestr.get())[]);
return SmallVector::zero();
}
}
}
None => {
fld.cx.span_err(path_span,
- format!("non-item macro in item position: {}",
- extnamestr.get()).index(&FullRange));
+ &format!("non-item macro in item position: {}",
+ extnamestr.get())[]);
return SmallVector::zero();
}
};
let marked_after = match fld.cx.syntax_env.find(&extname.name) {
None => {
fld.cx.span_err(pth.span,
- format!("macro undefined: '{}!'",
- extnamestr).index(&FullRange));
+ &format!("macro undefined: '{}!'",
+ extnamestr)[]);
// let compilation continue
return DummyResult::raw_pat(span);
}
});
let fm = fresh_mark();
- let marked_before = mark_tts(tts.index(&FullRange), fm);
+ let marked_before = mark_tts(&tts[], fm);
let mac_span = fld.cx.original_span();
let expanded = match expander.expand(fld.cx,
mac_span,
- marked_before.index(&FullRange)).make_pat() {
+ &marked_before[]).make_pat() {
Some(e) => e,
None => {
fld.cx.span_err(
pth.span,
- format!(
+ &format!(
"non-pattern macro in pattern position: {}",
extnamestr.get()
- ).index(&FullRange)
+ )[]
);
return DummyResult::raw_pat(span);
}
}
_ => {
fld.cx.span_err(span,
- format!("{}! is not legal in pattern position",
- extnamestr.get()).index(&FullRange));
+ &format!("{}! is not legal in pattern position",
+ extnamestr.get())[]);
return DummyResult::raw_pat(span);
}
}
node: match node {
MacInvocTT(path, tts, ctxt) => {
MacInvocTT(self.fold_path(path),
- self.fold_tts(tts.index(&FullRange)),
+ self.fold_tts(&tts[]),
mtwt::apply_mark(self.mark, ctxt))
}
},
let string = ident.get();
"xx" == string
}).collect();
- let cxbinds: &[&ast::Ident] = cxbinds.index(&FullRange);
+ let cxbinds: &[&ast::Ident] = &cxbinds[];
let cxbind = match cxbinds {
[b] => b,
_ => panic!("expected just one binding for ext_cx")
}
_ => {
ecx.span_err(p.span,
- format!("expected ident for named argument, found `{}`",
- p.this_token_to_string()).index(&FullRange));
+ &format!("expected ident for named argument, found `{}`",
+ p.this_token_to_string())[]);
return None;
}
};
None => {}
Some(prev) => {
ecx.span_err(e.span,
- format!("duplicate argument named `{}`",
- name).index(&FullRange));
+ &format!("duplicate argument named `{}`",
+ name)[]);
ecx.parse_sess.span_diagnostic.span_note(prev.span, "previously here");
continue
}
let msg = format!("invalid reference to argument `{}` ({})",
arg, self.describe_num_args());
- self.ecx.span_err(self.fmtsp, msg.index(&FullRange));
+ self.ecx.span_err(self.fmtsp, &msg[]);
return;
}
{
Some(e) => e.span,
None => {
let msg = format!("there is no argument named `{}`", name);
- self.ecx.span_err(self.fmtsp, msg.index(&FullRange));
+ self.ecx.span_err(self.fmtsp, &msg[]);
return;
}
};
match (cur, ty) {
(&Known(ref cur), &Known(ref ty)) => {
self.ecx.span_err(sp,
- format!("argument redeclared with type `{}` when \
+ &format!("argument redeclared with type `{}` when \
it was previously `{}`",
*ty,
- *cur).index(&FullRange));
+ *cur)[]);
}
(&Known(ref cur), _) => {
self.ecx.span_err(sp,
- format!("argument used to format with `{}` was \
+ &format!("argument used to format with `{}` was \
attempted to not be used for formatting",
- *cur).index(&FullRange));
+ *cur)[]);
}
(_, &Known(ref ty)) => {
self.ecx.span_err(sp,
- format!("argument previously used as a format \
+ &format!("argument previously used as a format \
argument attempted to be used as `{}`",
- *ty).index(&FullRange));
+ *ty)[]);
}
(_, _) => {
self.ecx.span_err(sp, "argument declared with multiple formats");
/// Translate the accumulated string literals to a literal expression
fn trans_literal_string(&mut self) -> P<ast::Expr> {
let sp = self.fmtsp;
- let s = token::intern_and_get_ident(self.literal.index(&FullRange));
+ let s = token::intern_and_get_ident(&self.literal[]);
self.literal.clear();
self.ecx.expr_str(sp, s)
}
None => continue // error already generated
};
- let name = self.ecx.ident_of(format!("__arg{}", i).index(&FullRange));
+ let name = self.ecx.ident_of(&format!("__arg{}", i)[]);
pats.push(self.ecx.pat_ident(e.span, name));
locals.push(Context::format_arg(self.ecx, e.span, arg_ty,
self.ecx.expr_ident(e.span, name)));
None => continue
};
- let lname = self.ecx.ident_of(format!("__arg{}",
- *name).index(&FullRange));
+ let lname = self.ecx.ident_of(&format!("__arg{}",
+ *name)[]);
pats.push(self.ecx.pat_ident(e.span, lname));
names[self.name_positions[*name]] =
Some(Context::format_arg(self.ecx, e.span, arg_ty,
-> P<ast::Expr> {
let trait_ = match *ty {
Known(ref tyname) => {
- match tyname.index(&FullRange) {
+ match &tyname[] {
"" => "String",
"?" => "Show",
"e" => "LowerExp",
"X" => "UpperHex",
_ => {
ecx.span_err(sp,
- format!("unknown format trait `{}`",
- *tyname).index(&FullRange));
+ &format!("unknown format trait `{}`",
+ *tyname)[]);
"Dummy"
}
}
}
}
if !parser.errors.is_empty() {
- cx.ecx.span_err(cx.fmtsp, format!("invalid format string: {}",
- parser.errors.remove(0)).index(&FullRange));
+ cx.ecx.span_err(cx.fmtsp, &format!("invalid format string: {}",
+ parser.errors.remove(0))[]);
return DummyResult::raw_expr(sp);
}
if !cx.literal.is_empty() {
}
// the internal function for computing marks
-// it's not clear to me whether it's better to use a .index(&FullRange) mutable
+// it's not clear to me whether it's better to use a [] mutable
// vector or a cons-list for this.
fn marksof_internal(ctxt: SyntaxContext,
stopname: Name,
}
fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> {
- strs.iter().map(|str| str_to_ident((*str).index(&FullRange))).collect()
+ strs.iter().map(|str| str_to_ident(&(*str)[])).collect()
}
fn id_ext(str: &str) -> ast::Ident {
for i in range(0, tt.len()) {
seq.push(tt.get_tt(i));
}
- mk_tts(cx, seq.index(&FullRange))
+ mk_tts(cx, &seq[])
}
ast::TtToken(sp, ref tok) => {
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
let mut vector = vec!(stmt_let_sp, stmt_let_tt);
- vector.extend(mk_tts(cx, tts.index(&FullRange)).into_iter());
+ vector.extend(mk_tts(cx, &tts[]).into_iter());
let block = cx.expr_block(
cx.block_all(sp,
Vec::new(),
let topmost = cx.original_span_in_file();
let loc = cx.codemap().lookup_char_pos(topmost.lo);
- let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange));
+ let filename = token::intern_and_get_ident(&loc.file.name[]);
base::MacExpr::new(cx.expr_str(topmost, filename))
}
-> Box<base::MacResult+'static> {
let s = pprust::tts_to_string(tts);
base::MacExpr::new(cx.expr_str(sp,
- token::intern_and_get_ident(s.index(&FullRange))))
+ token::intern_and_get_ident(&s[])))
}
pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
.connect("::");
base::MacExpr::new(cx.expr_str(
sp,
- token::intern_and_get_ident(string.index(&FullRange))))
+ token::intern_and_get_ident(&string[])))
}
/// include! : parse the given file as an expr
let bytes = match File::open(&file).read_to_end() {
Err(e) => {
cx.span_err(sp,
- format!("couldn't read {:?}: {}",
+ &format!("couldn't read {:?}: {}",
file.display(),
- e).index(&FullRange));
+ e)[]);
return DummyResult::expr(sp);
}
Ok(bytes) => bytes,
// Add this input file to the code map to make it available as
// dependency information
let filename = format!("{:?}", file.display());
- let interned = token::intern_and_get_ident(src.index(&FullRange));
+ let interned = token::intern_and_get_ident(&src[]);
cx.codemap().new_filemap(filename, src);
base::MacExpr::new(cx.expr_str(sp, interned))
}
Err(_) => {
cx.span_err(sp,
- format!("{:?} wasn't a utf-8 file",
- file.display()).index(&FullRange));
+ &format!("{:?} wasn't a utf-8 file",
+ file.display())[]);
return DummyResult::expr(sp);
}
}
match File::open(&file).read_to_end() {
Err(e) => {
cx.span_err(sp,
- format!("couldn't read {:?}: {}", file.display(), e).index(&FullRange));
+ &format!("couldn't read {:?}: {}", file.display(), e)[]);
return DummyResult::expr(sp);
}
Ok(bytes) => {
seq.num_captures
}
&TtDelimited(_, ref delim) => {
- count_names(delim.tts.index(&FullRange))
+ count_names(&delim.tts[])
}
&TtToken(_, MatchNt(..)) => {
1
pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
-> Box<MatcherPos> {
- let match_idx_hi = count_names(ms.index(&FullRange));
+ let match_idx_hi = count_names(&ms[]);
let matches: Vec<_> = range(0, match_idx_hi).map(|_| Vec::new()).collect();
box MatcherPos {
stack: vec![],
let string = token::get_ident(bind_name);
p_s.span_diagnostic
.span_fatal(sp,
- format!("duplicated bind name: {}",
- string.get()).index(&FullRange))
+ &format!("duplicated bind name: {}",
+ string.get())[])
}
}
}
rdr: TtReader,
ms: Vec<TokenTree> )
-> HashMap<Ident, Rc<NamedMatch>> {
- match parse(sess, cfg, rdr, ms.index(&FullRange)) {
+ match parse(sess, cfg, rdr, &ms[]) {
Success(m) => m,
Failure(sp, str) => {
- sess.span_diagnostic.span_fatal(sp, str.index(&FullRange))
+ sess.span_diagnostic.span_fatal(sp, &str[])
}
Error(sp, str) => {
- sess.span_diagnostic.span_fatal(sp, str.index(&FullRange))
+ sess.span_diagnostic.span_fatal(sp, &str[])
}
}
}
for dv in (&mut eof_eis[0]).matches.iter_mut() {
v.push(dv.pop().unwrap());
}
- return Success(nameize(sess, ms, v.index(&FullRange)));
+ return Success(nameize(sess, ms, &v[]));
} else if eof_eis.len() > 1u {
return Error(sp, "ambiguity: multiple successful parses".to_string());
} else {
token::Ident(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
_ => {
let token_str = pprust::token_to_string(&p.token);
- p.fatal((format!("expected ident, found {}",
- token_str.index(&FullRange))).index(&FullRange))
+ p.fatal(&format!("expected ident, found {}",
+ &token_str[])[])
}
},
"path" => {
}
"meta" => token::NtMeta(p.parse_meta_item()),
_ => {
- p.fatal(format!("unsupported builtin nonterminal parser: {}", name).index(&FullRange))
+ p.fatal(&format!("unsupported builtin nonterminal parser: {}", name)[])
}
}
}
following",
token_str);
let span = parser.span;
- parser.span_err(span, msg.index(&FullRange));
+ parser.span_err(span, &msg[]);
}
}
}
self.name,
self.imported_from,
arg,
- self.lhses.index(&FullRange),
- self.rhses.index(&FullRange))
+ &self.lhses[],
+ &self.rhses[])
}
}
match **lhs {
MatchedNonterminal(NtTT(ref lhs_tt)) => {
let lhs_tt = match **lhs_tt {
- TtDelimited(_, ref delim) => delim.tts.index(&FullRange),
+ TtDelimited(_, ref delim) => &delim.tts[],
_ => cx.span_fatal(sp, "malformed macro lhs")
};
// `None` is because we're not interpolating
best_fail_spot = sp;
best_fail_msg = (*msg).clone();
},
- Error(sp, ref msg) => cx.span_fatal(sp, msg.index(&FullRange))
+ Error(sp, ref msg) => cx.span_fatal(sp, &msg[])
}
}
_ => cx.bug("non-matcher found in parsed lhses")
}
}
- cx.span_fatal(best_fail_spot, best_fail_msg.index(&FullRange));
+ cx.span_fatal(best_fail_spot, &best_fail_msg[]);
}
// Note that macro-by-example's input is also matched against a token tree:
}
LisContradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead
- r.sp_diag.span_fatal(sp.clone(), msg.index(&FullRange));
+ r.sp_diag.span_fatal(sp.clone(), &msg[]);
}
LisConstraint(len, _) => {
if len == 0 {
MatchedSeq(..) => {
r.sp_diag.span_fatal(
r.cur_span, /* blame the macro writer */
- format!("variable '{:?}' is still repeating at this depth",
- token::get_ident(ident)).index(&FullRange));
+ &format!("variable '{:?}' is still repeating at this depth",
+ token::get_ident(ident))[]);
}
}
}
fn gate_feature(&self, feature: &str, span: Span, explain: &str) {
if !self.has_feature(feature) {
self.span_handler.span_err(span, explain);
- self.span_handler.span_help(span, format!("add #![feature({})] to the \
+ self.span_handler.span_help(span, &format!("add #![feature({})] to the \
crate attributes to enable",
- feature).index(&FullRange));
+ feature)[]);
}
}
}
match i.node {
ast::ItemForeignMod(ref foreign_module) => {
- if attr::contains_name(i.attrs.index(&FullRange), "link_args") {
+ if attr::contains_name(&i.attrs[], "link_args") {
self.gate_feature("link_args", i.span,
"the `link_args` attribute is not portable \
across platforms, it is recommended to \
}
ast::ItemFn(..) => {
- if attr::contains_name(i.attrs.index(&FullRange), "plugin_registrar") {
+ if attr::contains_name(&i.attrs[], "plugin_registrar") {
self.gate_feature("plugin_registrar", i.span,
"compiler plugins are experimental and possibly buggy");
}
}
ast::ItemStruct(..) => {
- if attr::contains_name(i.attrs.index(&FullRange), "simd") {
+ if attr::contains_name(&i.attrs[], "simd") {
self.gate_feature("simd", i.span,
"SIMD types are experimental and possibly buggy");
}
removed in the future");
}
- if attr::contains_name(i.attrs.index(&FullRange),
+ if attr::contains_name(&i.attrs[],
"old_orphan_check") {
self.gate_feature(
"old_orphan_check",
"the new orphan check rules will eventually be strictly enforced");
}
- if attr::contains_name(i.attrs.index(&FullRange),
+ if attr::contains_name(&i.attrs[],
"old_impl_check") {
self.gate_feature("old_impl_check",
i.span,
}
fn visit_foreign_item(&mut self, i: &ast::ForeignItem) {
- if attr::contains_name(i.attrs.index(&FullRange), "linkage") {
+ if attr::contains_name(&i.attrs[], "linkage") {
self.gate_feature("linkage", i.span,
"the `linkage` attribute is experimental \
and not portable across platforms")
}
_ => {
let token_str = self.this_token_to_string();
- self.fatal(format!("expected `#`, found `{}`", token_str).index(&FullRange));
+ self.fatal(&format!("expected `#`, found `{}`", token_str)[]);
}
};
while j > i && lines[j - 1].trim().is_empty() {
j -= 1;
}
- return lines.index(&(i..j)).iter().map(|x| (*x).clone()).collect();
+ return lines[i..j].iter().map(|x| (*x).clone()).collect();
}
/// remove a "[ \t]*\*" block from each line, if possible
if can_trim {
lines.iter().map(|line| {
- line.index(&((i + 1)..line.len())).to_string()
+ (&line[(i + 1)..line.len()]).to_string()
}).collect()
} else {
lines
static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"];
for prefix in ONLINERS.iter() {
if comment.starts_with(*prefix) {
- return comment.index(&(prefix.len()..)).to_string();
+ return (&comment[prefix.len()..]).to_string();
}
}
if comment.starts_with("/*") {
- let lines = comment.index(&(3u..(comment.len() - 2u)))
+ let lines = comment[3u..(comment.len() - 2u)]
.lines_any()
.map(|s| s.to_string())
.collect::<Vec<String> >();
let line = rdr.read_one_line_comment();
debug!("{}", line);
// Doc comments are not put in comments.
- if is_doc_comment(line.index(&FullRange)) {
+ if is_doc_comment(&line[]) {
break;
}
lines.push(line);
fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> ,
s: String, col: CharPos) {
let len = s.len();
- let s1 = match all_whitespace(s.index(&FullRange), col) {
+ let s1 = match all_whitespace(&s[], col) {
Some(col) => {
if col < len {
- s.index(&(col..len)).to_string()
+ (&s[col..len]).to_string()
} else {
"".to_string()
}
rdr.bump();
rdr.bump();
}
- if is_block_doc_comment(curr_line.index(&FullRange)) {
+ if is_block_doc_comment(&curr_line[]) {
return
}
assert!(!curr_line.contains_char('\n'));
let mut m = m.to_string();
m.push_str(": ");
for c in c.escape_default() { m.push(c) }
- self.fatal_span_(from_pos, to_pos, m.index(&FullRange));
+ self.fatal_span_(from_pos, to_pos, &m[]);
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
let mut m = m.to_string();
m.push_str(": ");
for c in c.escape_default() { m.push(c) }
- self.err_span_(from_pos, to_pos, m.index(&FullRange));
+ self.err_span_(from_pos, to_pos, &m[]);
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
m.push_str(": ");
let from = self.byte_offset(from_pos).to_uint();
let to = self.byte_offset(to_pos).to_uint();
- m.push_str(self.filemap.src.index(&(from..to)));
- self.fatal_span_(from_pos, to_pos, m.index(&FullRange));
+ m.push_str(&self.filemap.src[from..to]);
+ self.fatal_span_(from_pos, to_pos, &m[]);
}
/// Advance peek_tok and peek_span to refer to the next token, and
while i < s.len() {
let str::CharRange { ch, next } = s.char_range_at(i);
if ch == '\r' {
- if j < i { buf.push_str(s.index(&(j..i))); }
+ if j < i { buf.push_str(&s[j..i]); }
j = next;
if next >= s.len() || s.char_at(next) != '\n' {
let pos = start + BytePos(i as u32);
}
i = next;
}
- if j < s.len() { buf.push_str(s.index(&(j..))); }
+ if j < s.len() { buf.push_str(&s[j..]); }
buf
}
}
self.translate_crlf(start_bpos, string,
"bare CR not allowed in block doc-comment")
} else { string.into_cow() };
- token::DocComment(token::intern(string.index(&FullRange)))
+ token::DocComment(token::intern(&string[]))
} else {
token::Comment
};
// expansion purposes. See #12512 for the gory details of why
// this is necessary.
let ident = self.with_str_from(start, |lifetime_name| {
- str_to_ident(format!("'{}", lifetime_name).index(&FullRange))
+ str_to_ident(&format!("'{}", lifetime_name)[])
});
// Conjure up a "keyword checking ident" to make sure that
let bytes = match File::open(path).read_to_end() {
Ok(bytes) => bytes,
Err(e) => {
- err(format!("couldn't read {:?}: {:?}",
+ err(&format!("couldn't read {:?}: {:?}",
path.display(),
- e).index(&FullRange));
+ e)[]);
unreachable!()
}
};
- match str::from_utf8(bytes.index(&FullRange)).ok() {
+ match str::from_utf8(&bytes[]).ok() {
Some(s) => {
return string_to_filemap(sess, s.to_string(),
path.as_str().unwrap().to_string())
}
None => {
- err(format!("{:?} is not UTF-8 encoded", path.display()).index(&FullRange))
+ err(&format!("{:?} is not UTF-8 encoded", path.display())[])
}
}
unreachable!()
}
let msg = format!("lexer should have rejected a bad character escape {}", lit);
- let msg2 = msg.index(&FullRange);
+ let msg2 = &msg[];
fn esc(len: uint, lit: &str) -> Option<(char, int)> {
- num::from_str_radix(lit.index(&(2..len)), 16)
+ num::from_str_radix(&lit[2..len], 16)
.and_then(char::from_u32)
.map(|x| (x, len as int))
}
let unicode_escape = |&: | -> Option<(char, int)>
if lit.as_bytes()[2] == b'{' {
let idx = lit.find('}').expect(msg2);
- let subslice = lit.index(&(3..idx));
+ let subslice = &lit[3..idx];
num::from_str_radix(subslice, 16)
.and_then(char::from_u32)
.map(|x| (x, subslice.chars().count() as int + 4))
eat(&mut chars);
} else {
// otherwise, a normal escape
- let (c, n) = char_lit(lit.index(&(i..)));
+ let (c, n) = char_lit(&lit[i..]);
for _ in range(0, n - 1) { // we don't need to move past the first \
chars.next();
}
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
s.len() > 1 &&
first_chars.contains(&s.char_at(0)) &&
- s.index(&(1..)).chars().all(|c| '0' <= c && c <= '9')
+ s[1..].chars().all(|c| '0' <= c && c <= '9')
}
fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
// if it looks like a width, lets try to be helpful.
sd.span_err(sp, &*format!("illegal width `{}` for float literal, \
- valid widths are 32 and 64", suf.index(&(1..))));
+ valid widths are 32 and 64", &suf[1..]));
} else {
sd.span_err(sp, &*format!("illegal suffix `{}` for float literal, \
valid suffixes are `f32` and `f64`", suf));
b'\'' => b'\'',
b'0' => b'\0',
_ => {
- match ::std::num::from_str_radix::<u64>(lit.index(&(2..4)), 16) {
+ match ::std::num::from_str_radix::<u64>(&lit[2..4], 16) {
Some(c) =>
if c > 0xFF {
panic!(err(2))
}
_ => {
// otherwise, a normal escape
- let (c, n) = byte_lit(lit.index(&(i..)));
+ let (c, n) = byte_lit(&lit[i..]);
// we don't need to move past the first \
for _ in range(0, n - 1) {
chars.next();
// s can only be ascii, byte indexing is fine
let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
- let mut s = s2.index(&FullRange);
+ let mut s = &s2[];
debug!("integer_lit: {}, {:?}", s, suffix);
}
if base != 10 {
- s = s.index(&(2..));
+ s = &s[2..];
}
if let Some(suf) = suffix {
if looks_like_width_suffix(&['i', 'u'], suf) {
sd.span_err(sp, &*format!("illegal width `{}` for integer literal; \
valid widths are 8, 16, 32 and 64",
- suf.index(&(1..))));
+ &suf[1..]));
} else {
sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf));
}
#[test]
fn string_to_tts_macro () {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
- let tts: &[ast::TokenTree] = tts.index(&FullRange);
+ let tts: &[ast::TokenTree] = &tts[];
match tts {
[ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)),
ast::TtToken(_, token::Not),
ast::TtDelimited(_, ref macro_delimed)]
if name_macro_rules.as_str() == "macro_rules"
&& name_zip.as_str() == "zip" => {
- match macro_delimed.tts.index(&FullRange) {
+ match &macro_delimed.tts[] {
[ast::TtDelimited(_, ref first_delimed),
ast::TtToken(_, token::FatArrow),
ast::TtDelimited(_, ref second_delimed)]
if macro_delimed.delim == token::Paren => {
- match first_delimed.tts.index(&FullRange) {
+ match &first_delimed.tts[] {
[ast::TtToken(_, token::Dollar),
ast::TtToken(_, token::Ident(name, token::Plain))]
if first_delimed.delim == token::Paren
&& name.as_str() == "a" => {},
_ => panic!("value 3: {:?}", **first_delimed),
}
- match second_delimed.tts.index(&FullRange) {
+ match &second_delimed.tts[] {
[ast::TtToken(_, token::Dollar),
ast::TtToken(_, token::Ident(name, token::Plain))]
if second_delimed.delim == token::Paren
let use_s = "use foo::bar::baz;";
let vitem = string_to_view_item(use_s.to_string());
let vitem_s = view_item_to_string(&vitem);
- assert_eq!(vitem_s.index(&FullRange), use_s);
+ assert_eq!(&vitem_s[], use_s);
let use_s = "use foo::bar as baz;";
let vitem = string_to_view_item(use_s.to_string());
let vitem_s = view_item_to_string(&vitem);
- assert_eq!(vitem_s.index(&FullRange), use_s);
+ assert_eq!(&vitem_s[], use_s);
}
#[test] fn parse_extern_crate() {
let ex_s = "extern crate foo;";
let vitem = string_to_view_item(ex_s.to_string());
let vitem_s = view_item_to_string(&vitem);
- assert_eq!(vitem_s.index(&FullRange), ex_s);
+ assert_eq!(&vitem_s[], ex_s);
let ex_s = "extern crate \"foo\" as bar;";
let vitem = string_to_view_item(ex_s.to_string());
let vitem_s = view_item_to_string(&vitem);
- assert_eq!(vitem_s.index(&FullRange), ex_s);
+ assert_eq!(&vitem_s[], ex_s);
}
fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
let docs = item.attrs.iter().filter(|a| a.name().get() == "doc")
.map(|a| a.value_str().unwrap().get().to_string()).collect::<Vec<_>>();
let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
- assert_eq!(docs.index(&FullRange), b);
+ assert_eq!(&docs[], b);
let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap();
kind_str: &str,
desc: &str) {
self.span_err(sp,
- format!("obsolete syntax: {}", kind_str).index(&FullRange));
+ &format!("obsolete syntax: {}", kind_str)[]);
if !self.obsolete_set.contains(&kind) {
self.sess
.span_diagnostic
.handler()
- .note(format!("{}", desc).index(&FullRange));
+ .note(&format!("{}", desc)[]);
self.obsolete_set.insert(kind);
}
}
pub fn unexpected_last(&mut self, t: &token::Token) -> ! {
let token_str = Parser::token_to_string(t);
let last_span = self.last_span;
- self.span_fatal(last_span, format!("unexpected token: `{}`",
- token_str).index(&FullRange));
+ self.span_fatal(last_span, &format!("unexpected token: `{}`",
+ token_str)[]);
}
pub fn unexpected(&mut self) -> ! {
let this_token = self.this_token_to_string();
- self.fatal(format!("unexpected token: `{}`", this_token).index(&FullRange));
+ self.fatal(&format!("unexpected token: `{}`", this_token)[]);
}
/// Expect and consume the token t. Signal an error if
} else {
let token_str = Parser::token_to_string(t);
let this_token_str = self.this_token_to_string();
- self.fatal(format!("expected `{}`, found `{}`",
+ self.fatal(&format!("expected `{}`, found `{}`",
token_str,
- this_token_str).index(&FullRange))
+ this_token_str)[])
}
} else {
self.expect_one_of(slice::ref_slice(t), &[]);
expected.push_all(&*self.expected_tokens);
expected.sort_by(|a, b| a.to_string().cmp(&b.to_string()));
expected.dedup();
- let expect = tokens_to_string(expected.index(&FullRange));
+ let expect = tokens_to_string(&expected[]);
let actual = self.this_token_to_string();
self.fatal(
- (if expected.len() != 1 {
+ &(if expected.len() != 1 {
(format!("expected one of {}, found `{}`",
expect,
actual))
(format!("expected {}, found `{}`",
expect,
actual))
- }).index(&FullRange)
+ }[])
)
}
}
// might be unit-struct construction; check for recoverableinput error.
let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
expected.push_all(inedible);
- self.check_for_erroneous_unit_struct_expecting(expected.index(&FullRange));
+ self.check_for_erroneous_unit_struct_expecting(&expected[]);
}
self.expect_one_of(edible, inedible)
}
.as_ref()
.map_or(false, |t| t.is_ident() || t.is_path()) {
let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
- expected.push_all(inedible.index(&FullRange));
+ expected.push_all(&inedible[]);
self.check_for_erroneous_unit_struct_expecting(
- expected.index(&FullRange));
+ &expected[]);
}
self.expect_one_of(edible, inedible)
}
}
_ => {
let token_str = self.this_token_to_string();
- self.fatal((format!("expected ident, found `{}`",
- token_str)).index(&FullRange))
+ self.fatal(&format!("expected ident, found `{}`",
+ token_str)[])
}
}
}
if !self.eat_keyword(kw) {
let id_interned_str = token::get_name(kw.to_name());
let token_str = self.this_token_to_string();
- self.fatal(format!("expected `{}`, found `{}`",
- id_interned_str, token_str).index(&FullRange))
+ self.fatal(&format!("expected `{}`, found `{}`",
+ id_interned_str, token_str)[])
}
}
let token_str = self.this_token_to_string();
let span = self.span;
self.span_err(span,
- format!("expected identifier, found keyword `{}`",
- token_str).index(&FullRange));
+ &format!("expected identifier, found keyword `{}`",
+ token_str)[]);
}
}
pub fn check_reserved_keywords(&mut self) {
if self.token.is_reserved_keyword() {
let token_str = self.this_token_to_string();
- self.fatal(format!("`{}` is a reserved keyword",
- token_str).index(&FullRange))
+ self.fatal(&format!("`{}` is a reserved keyword",
+ token_str)[])
}
}
let token_str = self.this_token_to_string();
let found_token =
Parser::token_to_string(&token::BinOp(token::And));
- self.fatal(format!("expected `{}`, found `{}`",
+ self.fatal(&format!("expected `{}`, found `{}`",
found_token,
- token_str).index(&FullRange))
+ token_str)[])
}
}
}
let found_token = self.this_token_to_string();
let token_str =
Parser::token_to_string(&token::BinOp(token::Or));
- self.fatal(format!("expected `{}`, found `{}`",
+ self.fatal(&format!("expected `{}`, found `{}`",
token_str,
- found_token).index(&FullRange))
+ found_token)[])
}
}
}
if !self.eat_lt() {
let found_token = self.this_token_to_string();
let token_str = Parser::token_to_string(&token::Lt);
- self.fatal(format!("expected `{}`, found `{}`",
+ self.fatal(&format!("expected `{}`, found `{}`",
token_str,
- found_token).index(&FullRange))
+ found_token)[])
}
}
_ => {
let gt_str = Parser::token_to_string(&token::Gt);
let this_token_str = self.this_token_to_string();
- self.fatal(format!("expected `{}`, found `{}`",
+ self.fatal(&format!("expected `{}`, found `{}`",
gt_str,
- this_token_str).index(&FullRange))
+ this_token_str)[])
}
}
}
let (inner_attrs, body) =
p.parse_inner_attrs_and_block();
let mut attrs = attrs;
- attrs.push_all(inner_attrs.index(&FullRange));
+ attrs.push_all(&inner_attrs[]);
ProvidedMethod(P(ast::Method {
attrs: attrs,
id: ast::DUMMY_NODE_ID,
_ => {
let token_str = p.this_token_to_string();
- p.fatal((format!("expected `;` or `{{`, found `{}`",
- token_str)).index(&FullRange))
+ p.fatal(&format!("expected `;` or `{{`, found `{}`",
+ token_str)[])
}
}
}
} else {
let this_token_str = self.this_token_to_string();
let msg = format!("expected type, found `{}`", this_token_str);
- self.fatal(msg.index(&FullRange));
+ self.fatal(&msg[]);
};
let sp = mk_sp(lo, self.last_span.hi);
token::StrRaw(s, n) => {
(true,
LitStr(
- token::intern_and_get_ident(
- parse::raw_str_lit(s.as_str()).index(&FullRange)),
+ token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())[]),
ast::RawStr(n)))
}
token::Binary(i) =>
};
}
_ => {
- self.fatal(format!("expected a lifetime name").index(&FullRange));
+ self.fatal(&format!("expected a lifetime name")[]);
}
}
}
let msg = format!("expected `,` or `>` after lifetime \
name, found `{}`",
this_token_str);
- self.fatal(msg.index(&FullRange));
+ self.fatal(&msg[]);
}
}
}
let last_span = self.last_span;
let fstr = n.as_str();
self.span_err(last_span,
- format!("unexpected token: `{}`", n.as_str()).index(&FullRange));
+ &format!("unexpected token: `{}`", n.as_str())[]);
if fstr.chars().all(|x| "0123456789.".contains_char(x)) {
let float = match fstr.parse::<f64>() {
Some(f) => f,
None => continue,
};
self.span_help(last_span,
- format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
+ &format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
float.trunc() as uint,
- float.fract().to_string().index(&(1..))).index(&FullRange));
+ &float.fract().to_string()[1..])[]);
}
self.abort_if_errors();
if self.quote_depth == 0u {
match self.token {
token::SubstNt(name, _) =>
- self.fatal(format!("unknown macro variable `{}`",
- token::get_ident(name)).index(&FullRange)),
+ self.fatal(&format!("unknown macro variable `{}`",
+ token::get_ident(name))[]),
_ => {}
}
}
Some(&sp) => p.span_note(sp, "unclosed delimiter"),
};
let token_str = p.this_token_to_string();
- p.fatal(format!("incorrect close delimiter: `{}`",
- token_str).index(&FullRange))
+ p.fatal(&format!("incorrect close delimiter: `{}`",
+ token_str)[])
},
/* we ought to allow different depths of unquotation */
token::Dollar | token::SubstNt(..) if p.quote_depth > 0u => {
let span = self.span;
let this_token_to_string = self.this_token_to_string();
self.span_err(span,
- format!("expected expression, found `{}`",
- this_token_to_string).index(&FullRange));
+ &format!("expected expression, found `{}`",
+ this_token_to_string)[]);
let box_span = mk_sp(lo, self.last_span.hi);
self.span_help(box_span,
"perhaps you meant `box() (foo)` instead?");
self.bump();
if self.token != token::CloseDelim(token::Brace) {
let token_str = self.this_token_to_string();
- self.fatal(format!("expected `{}`, found `{}`", "}",
- token_str).index(&FullRange))
+ self.fatal(&format!("expected `{}`, found `{}`", "}",
+ token_str)[])
}
etc = true;
break;
match bind_type {
BindByRef(..) | BindByValue(MutMutable) => {
let token_str = self.this_token_to_string();
- self.fatal(format!("unexpected `{}`",
- token_str).index(&FullRange))
+ self.fatal(&format!("unexpected `{}`",
+ token_str)[])
}
_ => {}
}
let span = self.span;
let tok_str = self.this_token_to_string();
self.span_fatal(span,
- format!("expected identifier, found `{}`", tok_str).index(&FullRange));
+ &format!("expected identifier, found `{}`", tok_str)[]);
}
let ident = self.parse_ident();
let last_span = self.last_span;
let lo = self.span.lo;
if self.token.is_keyword(keywords::Let) {
- check_expected_item(self, item_attrs.index(&FullRange));
+ check_expected_item(self, &item_attrs[]);
self.expect_keyword(keywords::Let);
let decl = self.parse_let();
P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID)))
&& self.look_ahead(1, |t| *t == token::Not) {
// it's a macro invocation:
- check_expected_item(self, item_attrs.index(&FullRange));
+ check_expected_item(self, &item_attrs[]);
// Potential trouble: if we allow macros with paths instead of
// idents, we'd need to look ahead past the whole path here...
""
};
let tok_str = self.this_token_to_string();
- self.fatal(format!("expected {}`(` or `{{`, found `{}`",
+ self.fatal(&format!("expected {}`(` or `{{`, found `{}`",
ident_str,
- tok_str).index(&FullRange))
+ tok_str)[])
},
};
}
} else {
let found_attrs = !item_attrs.is_empty();
- let item_err = Parser::expected_item_err(item_attrs.index(&FullRange));
+ let item_err = Parser::expected_item_err(&item_attrs[]);
match self.parse_item_or_view_item(item_attrs, false) {
IoviItem(i) => {
let hi = i.span.hi;
let sp = self.span;
let tok = self.this_token_to_string();
self.span_fatal_help(sp,
- format!("expected `{{`, found `{}`", tok).index(&FullRange),
+ &format!("expected `{{`, found `{}`", tok)[],
"place this code inside a block");
}
while self.token != token::CloseDelim(token::Brace) {
// parsing items even when they're not allowed lets us give
// better error messages and recover more gracefully.
- attributes_box.push_all(self.parse_outer_attributes().index(&FullRange));
+ attributes_box.push_all(&self.parse_outer_attributes()[]);
match self.token {
token::Semi => {
if !attributes_box.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
- Parser::expected_item_err(attributes_box.index(&FullRange)));
+ Parser::expected_item_err(&attributes_box[]));
attributes_box = Vec::new();
}
self.bump(); // empty
if !attributes_box.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
- Parser::expected_item_err(attributes_box.index(&FullRange)));
+ Parser::expected_item_err(&attributes_box[]));
}
let hi = self.span.hi;
},
_ => {
let token_str = self.this_token_to_string();
- self.fatal(format!("expected `self`, found `{}`",
- token_str).index(&FullRange))
+ self.fatal(&format!("expected `self`, found `{}`",
+ token_str)[])
}
}
}
}
_ => {
let token_str = self.this_token_to_string();
- self.fatal(format!("expected `,` or `)`, found `{}`",
- token_str).index(&FullRange))
+ self.fatal(&format!("expected `,` or `)`, found `{}`",
+ token_str)[])
}
}
}
let (inner_attrs, body) = self.parse_inner_attrs_and_block();
let body_span = body.span;
let mut new_attrs = attrs;
- new_attrs.push_all(inner_attrs.index(&FullRange));
+ new_attrs.push_all(&inner_attrs[]);
(ast::MethDecl(ident,
generics,
abi,
}
if fields.len() == 0 {
- self.fatal(format!("unit-like struct definition should be \
+ self.fatal(&format!("unit-like struct definition should be \
written as `struct {};`",
- token::get_ident(class_name.clone())).index(&FullRange));
+ token::get_ident(class_name.clone()))[]);
}
self.bump();
} else {
let token_str = self.this_token_to_string();
- self.fatal(format!("expected `where`, or `{}` after struct \
+ self.fatal(&format!("expected `where`, or `{}` after struct \
name, found `{}`", "{",
- token_str).index(&FullRange));
+ token_str)[]);
}
fields
});
if fields.len() == 0 {
- self.fatal(format!("unit-like struct definition should be \
+ self.fatal(&format!("unit-like struct definition should be \
written as `struct {};`",
- token::get_ident(class_name.clone())).index(&FullRange));
+ token::get_ident(class_name.clone()))[]);
}
self.parse_where_clause(generics);
// This case is where we see: `struct Foo<T>;`
} else {
let token_str = self.this_token_to_string();
- self.fatal(format!("expected `where`, `{}`, `(`, or `;` after struct \
- name, found `{}`", "{", token_str).index(&FullRange));
+ self.fatal(&format!("expected `where`, `{}`, `(`, or `;` after struct \
+ name, found `{}`", "{", token_str)[]);
}
}
let span = self.span;
let token_str = self.this_token_to_string();
self.span_fatal_help(span,
- format!("expected `,`, or `}}`, found `{}`",
- token_str).index(&FullRange),
+ &format!("expected `,`, or `}}`, found `{}`",
+ token_str)[],
"struct fields should be separated by commas")
}
}
let mut attrs = self.parse_outer_attributes();
if first {
let mut tmp = attrs_remaining.clone();
- tmp.push_all(attrs.index(&FullRange));
+ tmp.push_all(&attrs[]);
attrs = tmp;
first = false;
}
}
_ => {
let token_str = self.this_token_to_string();
- self.fatal(format!("expected item, found `{}`",
- token_str).index(&FullRange))
+ self.fatal(&format!("expected item, found `{}`",
+ token_str)[])
}
}
}
// We parsed attributes for the first item but didn't find it
let last_span = self.last_span;
self.span_err(last_span,
- Parser::expected_item_err(attrs_remaining.index(&FullRange)));
+ Parser::expected_item_err(&attrs_remaining[]));
}
ast::Mod {
-> (ast::Item_, Vec<ast::Attribute> ) {
let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span));
prefix.pop();
- let mod_path = Path::new(".").join_many(self.mod_path_stack.index(&FullRange));
+ let mod_path = Path::new(".").join_many(&self.mod_path_stack[]);
let dir_path = prefix.join(&mod_path);
let mod_string = token::get_ident(id);
let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name(
let mod_name = mod_string.get().to_string();
let default_path_str = format!("{}.rs", mod_name);
let secondary_path_str = format!("{}/mod.rs", mod_name);
- let default_path = dir_path.join(default_path_str.index(&FullRange));
- let secondary_path = dir_path.join(secondary_path_str.index(&FullRange));
+ let default_path = dir_path.join(&default_path_str[]);
+ let secondary_path = dir_path.join(&secondary_path_str[]);
let default_exists = default_path.exists();
let secondary_exists = secondary_path.exists();
None => self.root_module_name.as_ref().unwrap().clone(),
};
self.span_note(id_sp,
- format!("maybe move this module `{0}` \
+ &format!("maybe move this module `{0}` \
to its own directory via \
`{0}/mod.rs`",
- this_module).index(&FullRange));
+ this_module)[]);
if default_exists || secondary_exists {
self.span_note(id_sp,
- format!("... or maybe `use` the module \
+ &format!("... or maybe `use` the module \
`{}` instead of possibly \
redeclaring it",
- mod_name).index(&FullRange));
+ mod_name)[]);
}
self.abort_if_errors();
}
(false, true) => (secondary_path, true),
(false, false) => {
self.span_fatal_help(id_sp,
- format!("file not found for module `{}`",
- mod_name).index(&FullRange),
- format!("name the file either {} or {} inside \
+ &format!("file not found for module `{}`",
+ mod_name)[],
+ &format!("name the file either {} or {} inside \
the directory {:?}",
default_path_str,
secondary_path_str,
- dir_path.display()).index(&FullRange));
+ dir_path.display())[]);
}
(true, true) => {
self.span_fatal_help(
id_sp,
- format!("file for module `{}` found at both {} \
+ &format!("file for module `{}` found at both {} \
and {}",
mod_name,
default_path_str,
- secondary_path_str).index(&FullRange),
+ secondary_path_str)[],
"delete or rename one of them to remove the ambiguity");
}
}
let mut err = String::from_str("circular modules: ");
let len = included_mod_stack.len();
for p in included_mod_stack.slice(i, len).iter() {
- err.push_str(p.display().as_cow().index(&FullRange));
+ err.push_str(&p.display().as_cow()[]);
err.push_str(" -> ");
}
- err.push_str(path.display().as_cow().index(&FullRange));
- self.span_fatal(id_sp, err.index(&FullRange));
+ err.push_str(&path.display().as_cow()[]);
+ self.span_fatal(id_sp, &err[]);
}
None => ()
}
if !attrs_remaining.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
- Parser::expected_item_err(attrs_remaining.index(&FullRange)));
+ Parser::expected_item_err(&attrs_remaining[]));
}
assert!(self.token == token::CloseDelim(token::Brace));
ast::ForeignMod {
self.span_err(span, "expected `;`, found `as`");
self.span_help(span,
- format!("perhaps you meant to enclose the crate name `{}` in \
+ &format!("perhaps you meant to enclose the crate name `{}` in \
a string?",
- the_ident.as_str()).index(&FullRange));
+ the_ident.as_str())[]);
None
} else {
None
let span = self.span;
let token_str = self.this_token_to_string();
self.span_fatal(span,
- format!("expected extern crate name but \
+ &format!("expected extern crate name but \
found `{}`",
- token_str).index(&FullRange));
+ token_str)[]);
}
};
let struct_def = self.parse_struct_def();
if struct_def.fields.len() == 0 {
self.span_err(start_span,
- format!("unit-like struct variant should be written \
+ &format!("unit-like struct variant should be written \
without braces, as `{},`",
- token::get_ident(ident)).index(&FullRange));
+ token::get_ident(ident))[]);
}
kind = StructVariantKind(struct_def);
} else if self.check(&token::OpenDelim(token::Paren)) {
let last_span = self.last_span;
self.span_err(
last_span,
- format!("illegal ABI: expected one of [{}], \
+ &format!("illegal ABI: expected one of [{}], \
found `{}`",
abi::all_names().connect(", "),
- the_string).index(&FullRange));
+ the_string)[]);
None
}
}
if next_is_mod {
let last_span = self.last_span;
self.span_err(mk_sp(lo, last_span.hi),
- format!("`extern mod` is obsolete, use \
+ &format!("`extern mod` is obsolete, use \
`extern crate` instead \
to refer to external \
- crates.").index(&FullRange))
+ crates.")[])
}
return self.parse_item_extern_crate(lo, visibility, attrs);
}
let span = self.span;
let token_str = self.this_token_to_string();
self.span_fatal(span,
- format!("expected `{}` or `fn`, found `{}`", "{",
- token_str).index(&FullRange));
+ &format!("expected `{}` or `fn`, found `{}`", "{",
+ token_str)[]);
}
if self.eat_keyword(keywords::Virtual) {
if self.eat_keyword(keywords::Mod) {
// MODULE ITEM
let (ident, item_, extra_attrs) =
- self.parse_item_mod(attrs.index(&FullRange));
+ self.parse_item_mod(&attrs[]);
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
macros_allowed: bool)
-> ParsedItemsAndViewItems {
let mut attrs = first_item_attrs;
- attrs.push_all(self.parse_outer_attributes().index(&FullRange));
+ attrs.push_all(&self.parse_outer_attributes()[]);
// First, parse view items.
let mut view_items : Vec<ast::ViewItem> = Vec::new();
let mut items = Vec::new();
macros_allowed: bool)
-> ParsedItemsAndViewItems {
let mut attrs = first_item_attrs;
- attrs.push_all(self.parse_outer_attributes().index(&FullRange));
+ attrs.push_all(&self.parse_outer_attributes()[]);
let mut foreign_items = Vec::new();
loop {
match self.parse_foreign_item(attrs, macros_allowed) {
$(init_vec.push($si_str);)*
$(init_vec.push($sk_str);)*
$(init_vec.push($rk_str);)*
- interner::StrInterner::prefill(init_vec.index(&FullRange))
+ interner::StrInterner::prefill(&init_vec[])
}
}}
#[inline]
pub fn get<'a>(&'a self) -> &'a str {
- self.string.index(&FullRange)
+ &self.string[]
}
}
impl fmt::String for InternedString {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "{}", self.string.index(&FullRange))
+ write!(f, "{}", &self.string[])
}
}
impl<'a> PartialEq<&'a str> for InternedString {
#[inline(always)]
fn eq(&self, other: & &'a str) -> bool {
- PartialEq::eq(self.string.index(&FullRange), *other)
+ PartialEq::eq(&self.string[], *other)
}
#[inline(always)]
fn ne(&self, other: & &'a str) -> bool {
- PartialEq::ne(self.string.index(&FullRange), *other)
+ PartialEq::ne(&self.string[], *other)
}
}
impl<'a> PartialEq<InternedString > for &'a str {
#[inline(always)]
fn eq(&self, other: &InternedString) -> bool {
- PartialEq::eq(*self, other.string.index(&FullRange))
+ PartialEq::eq(*self, &other.string[])
}
#[inline(always)]
fn ne(&self, other: &InternedString) -> bool {
- PartialEq::ne(*self, other.string.index(&FullRange))
+ PartialEq::ne(*self, &other.string[])
}
}
impl Decodable for InternedString {
fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {
- Ok(get_name(get_ident_interner().intern(try!(d.read_str()).index(&FullRange))))
+ Ok(get_name(get_ident_interner().intern(&try!(d.read_str())[])))
}
}
impl Encodable for InternedString {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- s.emit_str(self.string.index(&FullRange))
+ s.emit_str(&self.string[])
}
}
if i != left {
s.push_str(", ");
}
- s.push_str(format!("{}={}",
+ s.push_str(&format!("{}={}",
szs[i],
- tok_str(toks[i].clone())).index(&FullRange));
+ tok_str(toks[i].clone()))[]);
i += 1u;
i %= n;
}
assert_eq!(l, len);
// assert!(l <= space);
self.space -= len;
- self.print_str(s.index(&FullRange))
+ self.print_str(&s[])
}
Eof => {
// Eof should never get here.
out,
ann,
is_expanded);
- try!(s.print_mod(&krate.module, krate.attrs.index(&FullRange)));
+ try!(s.print_mod(&krate.module, &krate.attrs[]));
try!(s.print_remaining_comments());
eof(&mut s.s)
}
pub fn synth_comment(&mut self, text: String) -> IoResult<()> {
try!(word(&mut self.s, "/*"));
try!(space(&mut self.s));
- try!(word(&mut self.s, text.index(&FullRange)));
+ try!(word(&mut self.s, &text[]));
try!(space(&mut self.s));
word(&mut self.s, "*/")
}
}
ast::TyTup(ref elts) => {
try!(self.popen());
- try!(self.commasep(Inconsistent, elts.index(&FullRange),
+ try!(self.commasep(Inconsistent, &elts[],
|s, ty| s.print_type(&**ty)));
if elts.len() == 1 {
try!(word(&mut self.s, ","));
}
ast::TyObjectSum(ref ty, ref bounds) => {
try!(self.print_type(&**ty));
- try!(self.print_bounds("+", bounds.index(&FullRange)));
+ try!(self.print_bounds("+", &bounds[]));
}
ast::TyPolyTraitRef(ref bounds) => {
- try!(self.print_bounds("", bounds.index(&FullRange)));
+ try!(self.print_bounds("", &bounds[]));
}
ast::TyQPath(ref qpath) => {
try!(word(&mut self.s, "<"));
item: &ast::ForeignItem) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
- try!(self.print_outer_attributes(item.attrs.index(&FullRange)));
+ try!(self.print_outer_attributes(&item.attrs[]));
match item.node {
ast::ForeignItemFn(ref decl, ref generics) => {
try!(self.print_fn(&**decl, None, abi::Rust, item.ident, generics,
self.end() // end the outer fn box
}
ast::ForeignItemStatic(ref t, m) => {
- try!(self.head(visibility_qualified(item.vis,
- "static").index(&FullRange)));
+ try!(self.head(&visibility_qualified(item.vis,
+ "static")[]));
if m {
try!(self.word_space("mut"));
}
fn print_associated_type(&mut self, typedef: &ast::AssociatedType)
-> IoResult<()>
{
- try!(self.print_outer_attributes(typedef.attrs.index(&FullRange)));
+ try!(self.print_outer_attributes(&typedef.attrs[]));
try!(self.word_space("type"));
try!(self.print_ty_param(&typedef.ty_param));
word(&mut self.s, ";")
pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
- try!(self.print_outer_attributes(item.attrs.index(&FullRange)));
+ try!(self.print_outer_attributes(&item.attrs[]));
try!(self.ann.pre(self, NodeItem(item)));
match item.node {
ast::ItemStatic(ref ty, m, ref expr) => {
- try!(self.head(visibility_qualified(item.vis,
- "static").index(&FullRange)));
+ try!(self.head(&visibility_qualified(item.vis,
+ "static")[]));
if m == ast::MutMutable {
try!(self.word_space("mut"));
}
try!(self.end()); // end the outer cbox
}
ast::ItemConst(ref ty, ref expr) => {
- try!(self.head(visibility_qualified(item.vis,
- "const").index(&FullRange)));
+ try!(self.head(&visibility_qualified(item.vis,
+ "const")[]));
try!(self.print_ident(item.ident));
try!(self.word_space(":"));
try!(self.print_type(&**ty));
item.vis
));
try!(word(&mut self.s, " "));
- try!(self.print_block_with_attrs(&**body, item.attrs.index(&FullRange)));
+ try!(self.print_block_with_attrs(&**body, &item.attrs[]));
}
ast::ItemMod(ref _mod) => {
- try!(self.head(visibility_qualified(item.vis,
- "mod").index(&FullRange)));
+ try!(self.head(&visibility_qualified(item.vis,
+ "mod")[]));
try!(self.print_ident(item.ident));
try!(self.nbsp());
try!(self.bopen());
- try!(self.print_mod(_mod, item.attrs.index(&FullRange)));
+ try!(self.print_mod(_mod, &item.attrs[]));
try!(self.bclose(item.span));
}
ast::ItemForeignMod(ref nmod) => {
try!(self.head("extern"));
- try!(self.word_nbsp(nmod.abi.to_string().index(&FullRange)));
+ try!(self.word_nbsp(&nmod.abi.to_string()[]));
try!(self.bopen());
- try!(self.print_foreign_mod(nmod, item.attrs.index(&FullRange)));
+ try!(self.print_foreign_mod(nmod, &item.attrs[]));
try!(self.bclose(item.span));
}
ast::ItemTy(ref ty, ref params) => {
try!(self.ibox(indent_unit));
try!(self.ibox(0u));
- try!(self.word_nbsp(visibility_qualified(item.vis,
- "type").index(&FullRange)));
+ try!(self.word_nbsp(&visibility_qualified(item.vis, "type")[]));
try!(self.print_ident(item.ident));
try!(self.print_generics(params));
try!(self.end()); // end the inner ibox
));
}
ast::ItemStruct(ref struct_def, ref generics) => {
- try!(self.head(visibility_qualified(item.vis,"struct").index(&FullRange)));
+ try!(self.head(&visibility_qualified(item.vis,"struct")[]));
try!(self.print_struct(&**struct_def, generics, item.ident, item.span));
}
try!(space(&mut self.s));
try!(self.bopen());
- try!(self.print_inner_attributes(item.attrs.index(&FullRange)));
+ try!(self.print_inner_attributes(&item.attrs[]));
for impl_item in impl_items.iter() {
match *impl_item {
ast::MethodImplItem(ref meth) => {
real_bounds.push(b);
}
}
- try!(self.print_bounds(":", real_bounds.index(&FullRange)));
+ try!(self.print_bounds(":", &real_bounds[]));
try!(self.print_where_clause(generics));
try!(word(&mut self.s, " "));
try!(self.bopen());
try!(self.print_ident(item.ident));
try!(self.cbox(indent_unit));
try!(self.popen());
- try!(self.print_tts(tts.index(&FullRange)));
+ try!(self.print_tts(&tts[]));
try!(self.pclose());
try!(word(&mut self.s, ";"));
try!(self.end());
generics: &ast::Generics, ident: ast::Ident,
span: codemap::Span,
visibility: ast::Visibility) -> IoResult<()> {
- try!(self.head(visibility_qualified(visibility, "enum").index(&FullRange)));
+ try!(self.head(&visibility_qualified(visibility, "enum")[]));
try!(self.print_ident(ident));
try!(self.print_generics(generics));
try!(self.print_where_clause(generics));
try!(space(&mut self.s));
- self.print_variants(enum_definition.variants.index(&FullRange), span)
+ self.print_variants(&enum_definition.variants[], span)
}
pub fn print_variants(&mut self,
for v in variants.iter() {
try!(self.space_if_not_bol());
try!(self.maybe_print_comment(v.span.lo));
- try!(self.print_outer_attributes(v.node.attrs.index(&FullRange)));
+ try!(self.print_outer_attributes(&v.node.attrs[]));
try!(self.ibox(indent_unit));
try!(self.print_variant(&**v));
try!(word(&mut self.s, ","));
if !struct_def.fields.is_empty() {
try!(self.popen());
try!(self.commasep(
- Inconsistent, struct_def.fields.index(&FullRange),
+ Inconsistent, &struct_def.fields[],
|s, field| {
match field.node.kind {
ast::NamedField(..) => panic!("unexpected named field"),
ast::NamedField(ident, visibility) => {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(field.span.lo));
- try!(self.print_outer_attributes(field.node.attrs.index(&FullRange)));
+ try!(self.print_outer_attributes(&field.node.attrs[]));
try!(self.print_visibility(visibility));
try!(self.print_ident(ident));
try!(self.word_nbsp(":"));
pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
match *tt {
ast::TtToken(_, ref tk) => {
- try!(word(&mut self.s, token_to_string(tk).index(&FullRange)));
+ try!(word(&mut self.s, &token_to_string(tk)[]));
match *tk {
parse::token::DocComment(..) => {
hardbreak(&mut self.s)
}
}
ast::TtDelimited(_, ref delimed) => {
- try!(word(&mut self.s, token_to_string(&delimed.open_token()).index(&FullRange)));
+ try!(word(&mut self.s, &token_to_string(&delimed.open_token())[]));
try!(space(&mut self.s));
- try!(self.print_tts(delimed.tts.index(&FullRange)));
+ try!(self.print_tts(&delimed.tts[]));
try!(space(&mut self.s));
- word(&mut self.s, token_to_string(&delimed.close_token()).index(&FullRange))
+ word(&mut self.s, &token_to_string(&delimed.close_token())[])
},
ast::TtSequence(_, ref seq) => {
try!(word(&mut self.s, "$("));
try!(word(&mut self.s, ")"));
match seq.separator {
Some(ref tk) => {
- try!(word(&mut self.s, token_to_string(tk).index(&FullRange)));
+ try!(word(&mut self.s, &token_to_string(tk)[]));
}
None => {},
}
if !args.is_empty() {
try!(self.popen());
try!(self.commasep(Consistent,
- args.index(&FullRange),
+ &args[],
|s, arg| s.print_type(&*arg.ty)));
try!(self.pclose());
}
pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(m.span.lo));
- try!(self.print_outer_attributes(m.attrs.index(&FullRange)));
+ try!(self.print_outer_attributes(&m.attrs[]));
try!(self.print_ty_fn(None,
None,
m.unsafety,
pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(meth.span.lo));
- try!(self.print_outer_attributes(meth.attrs.index(&FullRange)));
+ try!(self.print_outer_attributes(&meth.attrs[]));
match meth.node {
ast::MethDecl(ident,
ref generics,
Some(&explicit_self.node),
vis));
try!(word(&mut self.s, " "));
- self.print_block_with_attrs(&**body, meth.attrs.index(&FullRange))
+ self.print_block_with_attrs(&**body, &meth.attrs[])
},
ast::MethMac(codemap::Spanned { node: ast::MacInvocTT(ref pth, ref tts, _),
..}) => {
try!(word(&mut self.s, "! "));
try!(self.cbox(indent_unit));
try!(self.popen());
- try!(self.print_tts(tts.index(&FullRange)));
+ try!(self.print_tts(&tts[]));
try!(self.pclose());
try!(word(&mut self.s, ";"));
self.end()
ast::ExprVec(ref exprs) => {
try!(self.ibox(indent_unit));
try!(word(&mut self.s, "["));
- try!(self.commasep_exprs(Inconsistent, exprs.index(&FullRange)));
+ try!(self.commasep_exprs(Inconsistent, &exprs[]));
try!(word(&mut self.s, "]"));
try!(self.end());
}
try!(word(&mut self.s, "{"));
try!(self.commasep_cmnt(
Consistent,
- fields.index(&FullRange),
+ &fields[],
|s, field| {
try!(s.ibox(indent_unit));
try!(s.print_ident(field.ident.node));
}
ast::ExprTup(ref exprs) => {
try!(self.popen());
- try!(self.commasep_exprs(Inconsistent, exprs.index(&FullRange)));
+ try!(self.commasep_exprs(Inconsistent, &exprs[]));
if exprs.len() == 1 {
try!(word(&mut self.s, ","));
}
}
ast::ExprCall(ref func, ref args) => {
try!(self.print_expr_maybe_paren(&**func));
- try!(self.print_call_post(args.index(&FullRange)));
+ try!(self.print_call_post(&args[]));
}
ast::ExprMethodCall(ident, ref tys, ref args) => {
let base_args = args.slice_from(1);
try!(self.print_ident(ident.node));
if tys.len() > 0u {
try!(word(&mut self.s, "::<"));
- try!(self.commasep(Inconsistent, tys.index(&FullRange),
+ try!(self.commasep(Inconsistent, &tys[],
|s, ty| s.print_type(&**ty)));
try!(word(&mut self.s, ">"));
}
try!(self.print_string(a.asm.get(), a.asm_str_style));
try!(self.word_space(":"));
- try!(self.commasep(Inconsistent, a.outputs.index(&FullRange),
+ try!(self.commasep(Inconsistent, &a.outputs[],
|s, &(ref co, ref o, is_rw)| {
match co.get().slice_shift_char() {
Some(('=', operand)) if is_rw => {
- try!(s.print_string(format!("+{}", operand).index(&FullRange),
+ try!(s.print_string(&format!("+{}", operand)[],
ast::CookedStr))
}
_ => try!(s.print_string(co.get(), ast::CookedStr))
try!(space(&mut self.s));
try!(self.word_space(":"));
- try!(self.commasep(Inconsistent, a.inputs.index(&FullRange),
+ try!(self.commasep(Inconsistent, &a.inputs[],
|s, &(ref co, ref o)| {
try!(s.print_string(co.get(), ast::CookedStr));
try!(s.popen());
try!(space(&mut self.s));
try!(self.word_space(":"));
- try!(self.commasep(Inconsistent, a.clobbers.index(&FullRange),
+ try!(self.commasep(Inconsistent, &a.clobbers[],
|s, co| {
try!(s.print_string(co.get(), ast::CookedStr));
Ok(())
pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> {
if self.encode_idents_with_hygiene {
let encoded = ident.encode_with_hygiene();
- try!(word(&mut self.s, encoded.index(&FullRange)))
+ try!(word(&mut self.s, &encoded[]))
} else {
try!(word(&mut self.s, token::get_ident(ident).get()))
}
}
pub fn print_uint(&mut self, i: uint) -> IoResult<()> {
- word(&mut self.s, i.to_string().index(&FullRange))
+ word(&mut self.s, &i.to_string()[])
}
pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> {
}
try!(self.commasep(
Inconsistent,
- data.types.index(&FullRange),
+ &data.types[],
|s, ty| s.print_type(&**ty)));
comma = true;
}
try!(word(&mut self.s, "("));
try!(self.commasep(
Inconsistent,
- data.inputs.index(&FullRange),
+ &data.inputs[],
|s, ty| s.print_type(&**ty)));
try!(word(&mut self.s, ")"));
Some(ref args) => {
if !args.is_empty() {
try!(self.popen());
- try!(self.commasep(Inconsistent, args.index(&FullRange),
+ try!(self.commasep(Inconsistent, &args[],
|s, p| s.print_pat(&**p)));
try!(self.pclose());
}
try!(self.nbsp());
try!(self.word_space("{"));
try!(self.commasep_cmnt(
- Consistent, fields.index(&FullRange),
+ Consistent, &fields[],
|s, f| {
try!(s.cbox(indent_unit));
if !f.node.is_shorthand {
ast::PatTup(ref elts) => {
try!(self.popen());
try!(self.commasep(Inconsistent,
- elts.index(&FullRange),
+ &elts[],
|s, p| s.print_pat(&**p)));
if elts.len() == 1 {
try!(word(&mut self.s, ","));
ast::PatVec(ref before, ref slice, ref after) => {
try!(word(&mut self.s, "["));
try!(self.commasep(Inconsistent,
- before.index(&FullRange),
+ &before[],
|s, p| s.print_pat(&**p)));
for p in slice.iter() {
if !before.is_empty() { try!(self.word_space(",")); }
if !after.is_empty() { try!(self.word_space(",")); }
}
try!(self.commasep(Inconsistent,
- after.index(&FullRange),
+ &after[],
|s, p| s.print_pat(&**p)));
try!(word(&mut self.s, "]"));
}
}
try!(self.cbox(indent_unit));
try!(self.ibox(0u));
- try!(self.print_outer_attributes(arm.attrs.index(&FullRange)));
+ try!(self.print_outer_attributes(&arm.attrs[]));
let mut first = true;
for p in arm.pats.iter() {
if first {
// HACK(eddyb) ignore the separately printed self argument.
let args = if first {
- decl.inputs.index(&FullRange)
+ &decl.inputs[]
} else {
decl.inputs.slice_from(1)
};
ints.push(i);
}
- try!(self.commasep(Inconsistent, ints.index(&FullRange), |s, &idx| {
+ try!(self.commasep(Inconsistent, &ints[], |s, &idx| {
if idx < generics.lifetimes.len() {
let lifetime = &generics.lifetimes[idx];
s.print_lifetime_def(lifetime)
pub fn print_ty_param(&mut self, param: &ast::TyParam) -> IoResult<()> {
try!(self.print_ident(param.ident));
- try!(self.print_bounds(":", param.bounds.index(&FullRange)));
+ try!(self.print_bounds(":", &param.bounds[]));
match param.default {
Some(ref default) => {
try!(space(&mut self.s));
try!(word(&mut self.s, name.get()));
try!(self.popen());
try!(self.commasep(Consistent,
- items.index(&FullRange),
+ &items[],
|s, i| s.print_meta_item(&**i)));
try!(self.pclose());
}
try!(self.print_path(path, false));
try!(word(&mut self.s, "::{"));
}
- try!(self.commasep(Inconsistent, idents.index(&FullRange), |s, w| {
+ try!(self.commasep(Inconsistent, &idents[], |s, w| {
match w.node {
ast::PathListIdent { name, .. } => {
s.print_ident(name)
pub fn print_view_item(&mut self, item: &ast::ViewItem) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
- try!(self.print_outer_attributes(item.attrs.index(&FullRange)));
+ try!(self.print_outer_attributes(&item.attrs[]));
try!(self.print_visibility(item.vis));
match item.node {
ast::ViewItemExternCrate(id, ref optional_path, _) => {
try!(self.pclose());
}
- try!(self.print_bounds(":", bounds.index(&FullRange)));
+ try!(self.print_bounds(":", &bounds[]));
try!(self.print_fn_output(decl));
try!(self.maybe_print_comment(lit.span.lo));
match self.next_lit(lit.span.lo) {
Some(ref ltrl) => {
- return word(&mut self.s, (*ltrl).lit.index(&FullRange));
+ return word(&mut self.s, &(*ltrl).lit[]);
}
_ => ()
}
let mut res = String::from_str("b'");
ascii::escape_default(byte, |c| res.push(c as char));
res.push('\'');
- word(&mut self.s, res.index(&FullRange))
+ word(&mut self.s, &res[])
}
ast::LitChar(ch) => {
let mut res = String::from_str("'");
res.push(c);
}
res.push('\'');
- word(&mut self.s, res.index(&FullRange))
+ word(&mut self.s, &res[])
}
ast::LitInt(i, t) => {
match t {
ast::SignedIntLit(st, ast::Plus) => {
word(&mut self.s,
- ast_util::int_ty_to_string(st, Some(i as i64)).index(&FullRange))
+ &ast_util::int_ty_to_string(st, Some(i as i64))[])
}
ast::SignedIntLit(st, ast::Minus) => {
let istr = ast_util::int_ty_to_string(st, Some(-(i as i64)));
word(&mut self.s,
- format!("-{}", istr).index(&FullRange))
+ &format!("-{}", istr)[])
}
ast::UnsignedIntLit(ut) => {
word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i)).as_slice())
}
ast::UnsuffixedIntLit(ast::Plus) => {
- word(&mut self.s, format!("{}", i).index(&FullRange))
+ word(&mut self.s, &format!("{}", i)[])
}
ast::UnsuffixedIntLit(ast::Minus) => {
- word(&mut self.s, format!("-{}", i).index(&FullRange))
+ word(&mut self.s, &format!("-{}", i)[])
}
}
}
ast::LitFloat(ref f, t) => {
word(&mut self.s,
- format!(
+ &format!(
"{}{}",
f.get(),
- ast_util::float_ty_to_string(t).index(&FullRange)).index(&FullRange))
+ &ast_util::float_ty_to_string(t)[])[])
}
ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
ast::LitBool(val) => {
ascii::escape_default(ch as u8,
|ch| escaped.push(ch as char));
}
- word(&mut self.s, format!("b\"{}\"", escaped).index(&FullRange))
+ word(&mut self.s, &format!("b\"{}\"", escaped)[])
}
}
}
comments::Mixed => {
assert_eq!(cmnt.lines.len(), 1u);
try!(zerobreak(&mut self.s));
- try!(word(&mut self.s, cmnt.lines[0].index(&FullRange)));
+ try!(word(&mut self.s, &cmnt.lines[0][]));
zerobreak(&mut self.s)
}
comments::Isolated => {
// Don't print empty lines because they will end up as trailing
// whitespace
if !line.is_empty() {
- try!(word(&mut self.s, line.index(&FullRange)));
+ try!(word(&mut self.s, &line[]));
}
try!(hardbreak(&mut self.s));
}
comments::Trailing => {
try!(word(&mut self.s, " "));
if cmnt.lines.len() == 1u {
- try!(word(&mut self.s, cmnt.lines[0].index(&FullRange)));
+ try!(word(&mut self.s, &cmnt.lines[0][]));
hardbreak(&mut self.s)
} else {
try!(self.ibox(0u));
for line in cmnt.lines.iter() {
if !line.is_empty() {
- try!(word(&mut self.s, line.index(&FullRange)));
+ try!(word(&mut self.s, &line[]));
}
try!(hardbreak(&mut self.s));
}
string=st))
}
};
- word(&mut self.s, st.index(&FullRange))
+ word(&mut self.s, &st[])
}
pub fn next_comment(&mut self) -> Option<comments::Comment> {
Some(abi::Rust) => Ok(()),
Some(abi) => {
try!(self.word_nbsp("extern"));
- self.word_nbsp(abi.to_string().index(&FullRange))
+ self.word_nbsp(&abi.to_string()[])
}
None => Ok(())
}
match opt_abi {
Some(abi) => {
try!(self.word_nbsp("extern"));
- self.word_nbsp(abi.to_string().index(&FullRange))
+ self.word_nbsp(&abi.to_string()[])
}
None => Ok(())
}
if abi != abi::Rust {
try!(self.word_nbsp("extern"));
- try!(self.word_nbsp(abi.to_string().index(&FullRange)));
+ try!(self.word_nbsp(&abi.to_string()[]));
}
word(&mut self.s, "fn")
}
fn use_std(krate: &ast::Crate) -> bool {
- !attr::contains_name(krate.attrs.index(&FullRange), "no_std")
+ !attr::contains_name(&krate.attrs[], "no_std")
}
fn no_prelude(attrs: &[ast::Attribute]) -> bool {
// The name to use in `extern crate "name" as std;`
let actual_crate_name = match self.alt_std_name {
- Some(ref s) => token::intern_and_get_ident(s.index(&FullRange)),
+ Some(ref s) => token::intern_and_get_ident(&s[]),
None => token::intern_and_get_ident("std"),
};
attr::mark_used(&no_std_attr);
krate.attrs.push(no_std_attr);
- if !no_prelude(krate.attrs.index(&FullRange)) {
+ if !no_prelude(&krate.attrs[]) {
// only add `use std::prelude::*;` if there wasn't a
// `#![no_implicit_prelude]` at the crate level.
// fold_mod() will insert glob path.
}
fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
- if !no_prelude(item.attrs.index(&FullRange)) {
+ if !no_prelude(&item.attrs[]) {
// only recur if there wasn't `#![no_implicit_prelude]`
// on this item, i.e. this means that the prelude is not
// implicitly imported though the whole subtree
// We generate the test harness when building in the 'test'
// configuration, either with the '--test' or '--cfg test'
// command line options.
- let should_test = attr::contains_name(krate.config.index(&FullRange), "test");
+ let should_test = attr::contains_name(&krate.config[], "test");
// Check for #[reexport_test_harness_main = "some_name"] which
// creates a `use some_name = __test::main;`. This needs to be
// unconditional, so that the attribute is still marked as used in
// non-test builds.
let reexport_test_harness_main =
- attr::first_attr_value_str_by_name(krate.attrs.index(&FullRange),
+ attr::first_attr_value_str_by_name(&krate.attrs[],
"reexport_test_harness_main");
if should_test {
self.cx.path.push(ident);
}
debug!("current path: {}",
- ast_util::path_name_i(self.cx.path.index(&FullRange)));
+ ast_util::path_name_i(&self.cx.path[]));
if is_test_fn(&self.cx, &*i) || is_bench_fn(&self.cx, &*i) {
match i.node {
// When not compiling with --test we should not compile the
// #[test] functions
config::strip_items(krate, |attrs| {
- !attr::contains_name(attrs.index(&FullRange), "test") &&
- !attr::contains_name(attrs.index(&FullRange), "bench")
+ !attr::contains_name(&attrs[], "test") &&
+ !attr::contains_name(&attrs[], "bench")
})
}
fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
- let has_test_attr = attr::contains_name(i.attrs.index(&FullRange), "test");
+ let has_test_attr = attr::contains_name(&i.attrs[], "test");
fn has_test_signature(i: &ast::Item) -> HasTestSignature {
match &i.node {
}
fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
- let has_bench_attr = attr::contains_name(i.attrs.index(&FullRange), "bench");
+ let has_bench_attr = attr::contains_name(&i.attrs[], "bench");
fn has_test_signature(i: &ast::Item) -> bool {
match i.node {
mod __test {
extern crate test (name = "test", vers = "...");
fn main() {
- test::test_main_static(::os::args().index(&FullRange), tests)
+ test::test_main_static(&::os::args()[], tests)
}
static tests : &'static [test::TestDescAndFn] = &[
}
fn is_test_crate(krate: &ast::Crate) -> bool {
- match attr::find_crate_name(krate.attrs.index(&FullRange)) {
- Some(ref s) if "test" == s.get().index(&FullRange) => true,
+ match attr::find_crate_name(&krate.attrs[]) {
+ Some(ref s) if "test" == &s.get()[] => true,
_ => false
}
}
// creates $name: $expr
let field = |&: name, expr| ecx.field_imm(span, ecx.ident_of(name), expr);
- debug!("encoding {}", ast_util::path_name_i(path.index(&FullRange)));
+ debug!("encoding {}", ast_util::path_name_i(&path[]));
// path to the #[test] function: "foo::bar::baz"
- let path_string = ast_util::path_name_i(path.index(&FullRange));
- let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string.index(&FullRange)));
+ let path_string = ast_util::path_name_i(&path[]);
+ let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[]));
// self::test::StaticTestName($name_expr)
let name_expr = ecx.expr_call(span,
vect: RefCell<Vec<T> >,
}
-// when traits can extend traits, we should extend index<Name,T> to get .index(&FullRange)
+// when traits can extend traits, we should extend index<Name,T> to get []
impl<T: Eq + Hash + Clone + 'static> Interner<T> {
pub fn new() -> Interner<T> {
Interner {
impl Ord for RcStr {
fn cmp(&self, other: &RcStr) -> Ordering {
- self.index(&FullRange).cmp(other.index(&FullRange))
+ self[].cmp(&other[])
}
}
impl fmt::Show for RcStr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Show;
- self.index(&FullRange).fmt(f)
+ self[].fmt(f)
}
}
impl BorrowFrom<RcStr> for str {
fn borrow_from(owned: &RcStr) -> &str {
- owned.string.index(&FullRange)
+ &owned.string[]
}
}
impl Deref for RcStr {
type Target = str;
- fn deref(&self) -> &str { self.string.index(&FullRange) }
+ fn deref(&self) -> &str { &self.string[] }
}
/// A StrInterner differs from Interner<String> in that it accepts
vect: RefCell<Vec<RcStr> >,
}
-/// When traits can extend traits, we should extend index<Name,T> to get .index(&FullRange)
+/// When traits can extend traits, we should extend index<Name,T> to get []
impl StrInterner {
pub fn new() -> StrInterner {
StrInterner {
}
};
- let entry = open(term.index(&FullRange));
+ let entry = open(&term[]);
if entry.is_err() {
if os::getenv("MSYSCON").map_or(false, |s| {
"mintty.exe" == s
// Find the offset of the NUL we want to go to
- let nulpos = string_table.index(&((offset as uint) .. (string_table_bytes as uint)))
+ let nulpos = string_table[(offset as uint) .. (string_table_bytes as uint)]
.iter().position(|&b| b == 0);
match nulpos {
Some(len) => {
string_map.insert(name.to_string(),
- string_table.index(&((offset as uint) ..
- (offset as uint + len))).to_vec())
+ string_table[(offset as uint) ..
+ (offset as uint + len)].to_vec())
},
None => {
return Err("invalid file: missing NUL in \
for p in dirs_to_search.iter() {
if p.exists() {
let f = first_char.to_string();
- let newp = p.join_many(&[f.index(&FullRange), term]);
+ let newp = p.join_many(&[&f[], term]);
if newp.exists() {
return Some(box newp);
}
// on some installations the dir is named after the hex of the char (e.g. OS X)
let f = format!("{:x}", first_char as uint);
- let newp = p.join_many(&[f.index(&FullRange), term]);
+ let newp = p.join_many(&[&f[], term]);
if newp.exists() {
return Some(box newp);
}
st.write_failures().unwrap();
let s = match st.out {
- Raw(ref m) => String::from_utf8_lossy(m.index(&FullRange)),
+ Raw(ref m) => String::from_utf8_lossy(&m[]),
Pretty(_) => unreachable!()
};
#![feature(associated_types)]
extern crate core;
-use core::ops::{Index, Range, RangeTo, RangeFrom, FullRange};
+use core::ops::{Index, IndexMut, Range, RangeTo, RangeFrom, FullRange};
static mut COUNT: uint = 0;