Attribute grammar in reference manual allowed `#[foo, bar]`, which does not match parser behavior.
Also rename nonterminals to match parser code.
Fix #13825.
TARGET_CRATES := libc std green rustuv native flate arena glob term semver \
uuid serialize sync getopts collections num test time rand \
workcache url log regex graphviz core
-HOST_CRATES := syntax rustc rustdoc fourcc hexfloat regex_macros
+HOST_CRATES := syntax rustc rustdoc fourcc hexfloat regex_macros fmt_macros
CRATES := $(TARGET_CRATES) $(HOST_CRATES)
TOOLS := compiletest rustdoc rustc
DEPS_green := std rand native:context_switch
DEPS_rustuv := std native:uv native:uv_support
DEPS_native := std
-DEPS_syntax := std term serialize collections log
+DEPS_syntax := std term serialize collections log fmt_macros
DEPS_rustc := syntax native:rustllvm flate arena serialize sync getopts \
collections time log
DEPS_rustdoc := rustc native:hoedown serialize sync getopts collections \
DEPS_log := std sync
DEPS_regex := std collections
DEPS_regex_macros = syntax std regex
+DEPS_fmt_macros = std
TOOL_DEPS_compiletest := test green rustuv getopts
TOOL_DEPS_rustdoc := rustdoc native
fn make_exe_name(config: &config, testfile: &Path) -> Path {
let mut f = output_base_name(config, testfile);
if !os::consts::EXE_SUFFIX.is_empty() {
- match f.filename().map(|s| s + os::consts::EXE_SUFFIX.as_bytes()) {
+ match f.filename().map(|s| Vec::from_slice(s).append(os::consts::EXE_SUFFIX.as_bytes())) {
Some(v) => f.set_filename(v),
None => ()
}
fn aux_output_dir_name(config: &config, testfile: &Path) -> Path {
let mut f = output_base_name(config, testfile);
- match f.filename().map(|s| s + bytes!(".libaux")) {
+ match f.filename().map(|s| Vec::from_slice(s).append(bytes!(".libaux"))) {
Some(v) => f.set_filename(v),
None => ()
}
(*p).clone()
} else {
let stem = p.filestem().unwrap();
- p.with_filename(stem + bytes!("-") + suffix.as_bytes())
+ p.with_filename(Vec::from_slice(stem).append(bytes!("-")).append(suffix.as_bytes()))
}
}
~~~
let xs = [0, 1, 1, 2, 3, 5, 8];
-let ys = xs.iter().rev().skip(1).map(|&x| x * 2).collect::<~[int]>();
-assert_eq!(ys, ~[10, 6, 4, 2, 2, 0]);
+let ys = xs.iter().rev().skip(1).map(|&x| x * 2).collect::<Vec<int>>();
+assert_eq!(ys, vec![10, 6, 4, 2, 2, 0]);
~~~
The method requires a type hint for the container type, if the surrounding code
vectors is as follows:
~~~ {.ignore}
-impl<A> FromIterator<A> for ~[A] {
- pub fn from_iter<T: Iterator<A>>(iterator: &mut T) -> ~[A] {
+impl<T> FromIterator<T> for Vec<T> {
+ fn from_iter<I:Iterator<T>>(mut iterator: I) -> Vec<T> {
let (lower, _) = iterator.size_hint();
- let mut xs = with_capacity(lower);
- for x in iterator {
- xs.push(x);
+ let mut vector = Vec::with_capacity(lower);
+ for element in iterator {
+ vector.push(element);
}
- xs
+ vector
}
}
~~~
Within the body of an item that has type parameter declarations, the names of its type parameters are types:
~~~~
-fn map<A: Clone, B: Clone>(f: |A| -> B, xs: &[A]) -> ~[B] {
+fn map<A: Clone, B: Clone>(f: |A| -> B, xs: &[A]) -> Vec<B> {
if xs.len() == 0 {
- return ~[];
+ return vec![];
}
let first: B = f(xs[0].clone());
- let rest: ~[B] = map(f, xs.slice(1, xs.len()));
- return ~[first] + rest;
+ let rest: Vec<B> = map(f, xs.slice(1, xs.len()));
+ return vec![first].append(rest.as_slice());
}
~~~~
Here, `first` has type `B`, referring to `map`'s `B` type parameter;
-and `rest` has type `~[B]`, a vector type with element type `B`.
+and `rest` has type `Vec<B>`, a vector type with element type `B`.
### Self types
numbers.push(4);
numbers.push(5);
-// The type of a unique vector is written as `~[int]`
-let more_numbers: ~[int] = numbers.move_iter().collect();
+// The type of a unique vector is written as `Vec<int>`
+let more_numbers: Vec<int> = numbers.move_iter().map(|i| i+1).collect();
// The original `numbers` value can no longer be used, due to move semantics.
let ys: &mut [int] = &mut [1, 2, 3];
~~~
-Square brackets denote indexing into a vector:
+Square brackets denote indexing into a slice or fixed-size vector:
~~~~
# enum Crayon { Almond, AntiqueBrass, Apricot,
}
~~~~
-A vector can be destructured using pattern matching:
+A slice or fixed-size vector can be destructured using pattern matching:
~~~~
let numbers: &[int] = &[1, 2, 3];
~~~~
Both vectors and strings support a number of useful [methods](#methods),
-defined in [`std::vec`] and [`std::str`].
+defined in [`std::vec`], [`std::slice`], and [`std::str`].
[`std::vec`]: std/vec/index.html
+[`std::slice`]: std/slice/index.html
[`std::str`]: std/str/index.html
# Ownership escape hatches
use intrinsics;
use ptr::copy_nonoverlapping_memory;
-/// Casts the value at `src` to U. The two types must have the same length.
-#[inline]
-pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
- let mut dest: U = mem::uninit();
- let dest_ptr: *mut u8 = transmute(&mut dest);
- let src_ptr: *u8 = transmute(src);
- copy_nonoverlapping_memory(dest_ptr, src_ptr, mem::size_of::<U>());
- dest
-}
-
-/**
- * Move a thing into the void
- *
- * The forget function will take ownership of the provided value but neglect
- * to run any required cleanup or memory-management operations on it.
- */
-#[inline]
-pub unsafe fn forget<T>(thing: T) { intrinsics::forget(thing); }
-
-/**
- * Force-increment the reference count on a shared box. If used
- * carelessly, this can leak the box.
- */
-#[inline]
-pub unsafe fn bump_box_refcount<T>(t: @T) { forget(t); }
-
/**
* Transform a value of one type into a value of another type.
* Both types must have the same size and alignment.
* ```
*/
#[inline]
-pub unsafe fn transmute<L, G>(thing: L) -> G {
+pub unsafe fn transmute<T, U>(thing: T) -> U {
intrinsics::transmute(thing)
}
+/**
+ * Move a thing into the void
+ *
+ * The forget function will take ownership of the provided value but neglect
+ * to run any required cleanup or memory-management operations on it.
+ */
+#[inline]
+pub unsafe fn forget<T>(thing: T) { intrinsics::forget(thing); }
+
+/// Casts the value at `src` to U. The two types must have the same length.
+#[inline]
+pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
+ let mut dest: U = mem::uninit();
+ let dest_ptr: *mut u8 = transmute(&mut dest);
+ let src_ptr: *u8 = transmute(src);
+ copy_nonoverlapping_memory(dest_ptr, src_ptr, mem::size_of::<U>());
+ dest
+}
+
/// Coerce an immutable reference to be mutable.
#[inline]
#[deprecated="casting &T to &mut T is undefined behaviour: use Cell<T>, RefCell<T> or Unsafe<T>"]
#[cfg(test)]
mod tests {
- use cast::{bump_box_refcount, transmute};
+ use cast::transmute;
use raw;
use realstd::str::StrAllocating;
assert_eq!(1u, unsafe { ::cast::transmute_copy(&1) });
}
- #[test]
- fn test_bump_managed_refcount() {
- unsafe {
- let managed = @"box box box".to_owned(); // refcount 1
- bump_box_refcount(managed); // refcount 2
- let ptr: *int = transmute(managed); // refcount 2
- let _box1: @~str = ::cast::transmute_copy(&ptr);
- let _box2: @~str = ::cast::transmute_copy(&ptr);
- assert!(*_box1 == "box box box".to_owned());
- assert!(*_box2 == "box box box".to_owned());
- // Will destroy _box1 and _box2. Without the bump, this would
- // use-after-free. With too many bumps, it would leak.
- }
- }
-
#[test]
fn test_transmute() {
unsafe {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! Types dealing with dynamic mutability
+//! Types that provide interior mutability.
use clone::Clone;
use cmp::Eq;
///
/// ```rust
/// let a = [1, 2, 3, 4, 5];
- /// let b: ~[int] = a.iter().map(|&x| x).collect();
- /// assert!(a == b);
+ /// let b: Vec<int> = a.iter().map(|&x| x).collect();
+ /// assert!(a.as_slice() == b.as_slice());
/// ```
#[inline]
fn collect<B: FromIterator<A>>(&mut self) -> B {
#[test]
fn test_counter_from_iter() {
let it = count(0, 5).take(10);
- let xs: ~[int] = FromIterator::from_iter(it);
- assert_eq!(xs, box [0, 5, 10, 15, 20, 25, 30, 35, 40, 45]);
+ let xs: Vec<int> = FromIterator::from_iter(it);
+ assert_eq!(xs, vec![0, 5, 10, 15, 20, 25, 30, 35, 40, 45]);
}
#[test]
fn test_filter_map() {
let mut it = count(0u, 1u).take(10)
.filter_map(|x| if x % 2 == 0 { Some(x*x) } else { None });
- assert_eq!(it.collect::<~[uint]>(), box [0*0, 2*2, 4*4, 6*6, 8*8]);
+ assert_eq!(it.collect::<Vec<uint>>(), vec![0*0, 2*2, 4*4, 6*6, 8*8]);
}
#[test]
let ys = xs.iter()
.map(|&x| x)
.inspect(|_| n += 1)
- .collect::<~[uint]>();
+ .collect::<Vec<uint>>();
assert_eq!(n, xs.len());
assert_eq!(xs.as_slice(), ys.as_slice());
#[test]
fn test_collect() {
- let a = box [1, 2, 3, 4, 5];
- let b: ~[int] = a.iter().map(|&x| x).collect();
+ let a = vec![1, 2, 3, 4, 5];
+ let b: Vec<int> = a.iter().map(|&x| x).collect();
assert_eq!(a, b);
}
let mut it = xs.iter();
it.next();
it.next();
- assert_eq!(it.rev().map(|&x| x).collect::<~[int]>(), box [16, 14, 12, 10, 8, 6]);
+ assert_eq!(it.rev().map(|&x| x).collect::<Vec<int>>(), vec![16, 14, 12, 10, 8, 6]);
}
#[test]
#[test]
fn test_double_ended_range() {
- assert_eq!(range(11i, 14).rev().collect::<~[int]>(), box [13i, 12, 11]);
+ assert_eq!(range(11i, 14).rev().collect::<Vec<int>>(), vec![13i, 12, 11]);
for _ in range(10i, 0).rev() {
fail!("unreachable");
}
- assert_eq!(range(11u, 14).rev().collect::<~[uint]>(), box [13u, 12, 11]);
+ assert_eq!(range(11u, 14).rev().collect::<Vec<uint>>(), vec![13u, 12, 11]);
for _ in range(10u, 0).rev() {
fail!("unreachable");
}
}
}
- assert_eq!(range(0i, 5).collect::<~[int]>(), box [0i, 1, 2, 3, 4]);
- assert_eq!(range(-10i, -1).collect::<~[int]>(), box [-10, -9, -8, -7, -6, -5, -4, -3, -2]);
- assert_eq!(range(0i, 5).rev().collect::<~[int]>(), box [4, 3, 2, 1, 0]);
- assert_eq!(range(200, -5).collect::<~[int]>(), box []);
- assert_eq!(range(200, -5).rev().collect::<~[int]>(), box []);
- assert_eq!(range(200, 200).collect::<~[int]>(), box []);
- assert_eq!(range(200, 200).rev().collect::<~[int]>(), box []);
+ assert_eq!(range(0i, 5).collect::<Vec<int>>(), vec![0i, 1, 2, 3, 4]);
+ assert_eq!(range(-10i, -1).collect::<Vec<int>>(),
+ vec![-10, -9, -8, -7, -6, -5, -4, -3, -2]);
+ assert_eq!(range(0i, 5).rev().collect::<Vec<int>>(), vec![4, 3, 2, 1, 0]);
+ assert_eq!(range(200, -5).collect::<Vec<int>>(), vec![]);
+ assert_eq!(range(200, -5).rev().collect::<Vec<int>>(), vec![]);
+ assert_eq!(range(200, 200).collect::<Vec<int>>(), vec![]);
+ assert_eq!(range(200, 200).rev().collect::<Vec<int>>(), vec![]);
assert_eq!(range(0i, 100).size_hint(), (100, Some(100)));
// this test is only meaningful when sizeof uint < sizeof u64
#[test]
fn test_range_inclusive() {
- assert_eq!(range_inclusive(0i, 5).collect::<~[int]>(), box [0i, 1, 2, 3, 4, 5]);
- assert_eq!(range_inclusive(0i, 5).rev().collect::<~[int]>(), box [5i, 4, 3, 2, 1, 0]);
- assert_eq!(range_inclusive(200, -5).collect::<~[int]>(), box []);
- assert_eq!(range_inclusive(200, -5).rev().collect::<~[int]>(), box []);
- assert_eq!(range_inclusive(200, 200).collect::<~[int]>(), box [200]);
- assert_eq!(range_inclusive(200, 200).rev().collect::<~[int]>(), box [200]);
+ assert_eq!(range_inclusive(0i, 5).collect::<Vec<int>>(), vec![0i, 1, 2, 3, 4, 5]);
+ assert_eq!(range_inclusive(0i, 5).rev().collect::<Vec<int>>(), vec![5i, 4, 3, 2, 1, 0]);
+ assert_eq!(range_inclusive(200, -5).collect::<Vec<int>>(), vec![]);
+ assert_eq!(range_inclusive(200, -5).rev().collect::<Vec<int>>(), vec![]);
+ assert_eq!(range_inclusive(200, 200).collect::<Vec<int>>(), vec![200]);
+ assert_eq!(range_inclusive(200, 200).rev().collect::<Vec<int>>(), vec![200]);
}
#[test]
fn test_range_step() {
- assert_eq!(range_step(0i, 20, 5).collect::<~[int]>(), box [0, 5, 10, 15]);
- assert_eq!(range_step(20i, 0, -5).collect::<~[int]>(), box [20, 15, 10, 5]);
- assert_eq!(range_step(20i, 0, -6).collect::<~[int]>(), box [20, 14, 8, 2]);
- assert_eq!(range_step(200u8, 255, 50).collect::<~[u8]>(), box [200u8, 250]);
- assert_eq!(range_step(200, -5, 1).collect::<~[int]>(), box []);
- assert_eq!(range_step(200, 200, 1).collect::<~[int]>(), box []);
+ assert_eq!(range_step(0i, 20, 5).collect::<Vec<int>>(), vec![0, 5, 10, 15]);
+ assert_eq!(range_step(20i, 0, -5).collect::<Vec<int>>(), vec![20, 15, 10, 5]);
+ assert_eq!(range_step(20i, 0, -6).collect::<Vec<int>>(), vec![20, 14, 8, 2]);
+ assert_eq!(range_step(200u8, 255, 50).collect::<Vec<u8>>(), vec![200u8, 250]);
+ assert_eq!(range_step(200, -5, 1).collect::<Vec<int>>(), vec![]);
+ assert_eq!(range_step(200, 200, 1).collect::<Vec<int>>(), vec![]);
}
#[test]
fn test_range_step_inclusive() {
- assert_eq!(range_step_inclusive(0i, 20, 5).collect::<~[int]>(), box [0, 5, 10, 15, 20]);
- assert_eq!(range_step_inclusive(20i, 0, -5).collect::<~[int]>(), box [20, 15, 10, 5, 0]);
- assert_eq!(range_step_inclusive(20i, 0, -6).collect::<~[int]>(), box [20, 14, 8, 2]);
- assert_eq!(range_step_inclusive(200u8, 255, 50).collect::<~[u8]>(), box [200u8, 250]);
- assert_eq!(range_step_inclusive(200, -5, 1).collect::<~[int]>(), box []);
- assert_eq!(range_step_inclusive(200, 200, 1).collect::<~[int]>(), box [200]);
+ assert_eq!(range_step_inclusive(0i, 20, 5).collect::<Vec<int>>(), vec![0, 5, 10, 15, 20]);
+ assert_eq!(range_step_inclusive(20i, 0, -5).collect::<Vec<int>>(), vec![20, 15, 10, 5, 0]);
+ assert_eq!(range_step_inclusive(20i, 0, -6).collect::<Vec<int>>(), vec![20, 14, 8, 2]);
+ assert_eq!(range_step_inclusive(200u8, 255, 50).collect::<Vec<u8>>(), vec![200u8, 250]);
+ assert_eq!(range_step_inclusive(200, -5, 1).collect::<Vec<int>>(), vec![]);
+ assert_eq!(range_step_inclusive(200, 200, 1).collect::<Vec<int>>(), vec![200]);
}
#[test]
#[cfg(test)] pub use realstd::rt; // needed for fail!()
#[cfg(test)] pub use realstd::option; // needed for assert!()
#[cfg(test)] pub use realstd::os; // needed for tests
+ #[cfg(test)] pub use realstd::slice; // needed for tests
+ #[cfg(test)] pub use realstd::vec; // needed for vec![]
}
#[test]
fn test_collect() {
- let v: Option<~[int]> = collect(range(0, 0)
- .map(|_| Some(0)));
- assert_eq!(v, Some(box []));
+ let v: Option<Vec<int>> = collect(range(0, 0)
+ .map(|_| Some(0)));
+ assert_eq!(v, Some(vec![]));
- let v: Option<~[int]> = collect(range(0, 3)
- .map(|x| Some(x)));
- assert_eq!(v, Some(box [0, 1, 2]));
+ let v: Option<Vec<int>> = collect(range(0, 3)
+ .map(|x| Some(x)));
+ assert_eq!(v, Some(vec![0, 1, 2]));
- let v: Option<~[int]> = collect(range(0, 3)
- .map(|x| if x > 1 { None } else { Some(x) }));
+ let v: Option<Vec<int>> = collect(range(0, 3)
+ .map(|x| if x > 1 { None } else { Some(x) }));
assert_eq!(v, None);
// test that it does not take more elements than it needs
let mut functions = [|| Some(()), || None, || fail!()];
- let v: Option<~[()]> = collect(functions.mut_iter().map(|f| (*f)()));
+ let v: Option<Vec<()>> = collect(functions.mut_iter().map(|f| (*f)()));
assert_eq!(v, None);
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// FIXME: talk about offset, copy_memory, copy_nonoverlapping_memory
+
//! Conveniences for working with unsafe pointers, the `*T`, and `*mut T` types.
//!
//! Working with unsafe pointers in Rust is fairly uncommon,
#[test]
fn test_collect() {
- let v: Result<~[int], ()> = collect(range(0, 0).map(|_| Ok::<int, ()>(0)));
- assert_eq!(v, Ok(box []));
+ let v: Result<Vec<int>, ()> = collect(range(0, 0).map(|_| Ok::<int, ()>(0)));
+ assert_eq!(v, Ok(vec![]));
- let v: Result<~[int], ()> = collect(range(0, 3).map(|x| Ok::<int, ()>(x)));
- assert_eq!(v, Ok(box [0, 1, 2]));
+ let v: Result<Vec<int>, ()> = collect(range(0, 3).map(|x| Ok::<int, ()>(x)));
+ assert_eq!(v, Ok(vec![0, 1, 2]));
- let v: Result<~[int], int> = collect(range(0, 3)
- .map(|x| if x > 1 { Err(x) } else { Ok(x) }));
+ let v: Result<Vec<int>, int> = collect(range(0, 3)
+ .map(|x| if x > 1 { Err(x) } else { Ok(x) }));
assert_eq!(v, Err(2));
// test that it does not take more elements than it needs
let mut functions = [|| Ok(()), || Err(1), || fail!()];
- let v: Result<~[()], int> = collect(functions.mut_iter().map(|f| (*f)()));
+ let v: Result<Vec<()>, int> = collect(functions.mut_iter().map(|f| (*f)()));
assert_eq!(v, Err(1));
}
use clone::Clone;
use container::Container;
use default::Default;
+use finally::try_finally;
use intrinsics;
-use iter::{Iterator, FromIterator};
+use iter::{range, Iterator, FromIterator};
use mem;
use num::{CheckedMul, CheckedAdd};
use option::{Some, None};
use str::StrSlice;
#[cfg(not(test))] use ops::Add;
-#[cfg(not(test))] use slice::Vector;
#[allow(ctypes)]
extern {
impl<A: Clone> Clone for ~[A] {
#[inline]
fn clone(&self) -> ~[A] {
- self.iter().map(|a| a.clone()).collect()
- }
-}
-
-impl<A> FromIterator<A> for ~[A] {
- fn from_iter<T: Iterator<A>>(mut iterator: T) -> ~[A] {
- let (lower, _) = iterator.size_hint();
- let cap = if lower == 0 {16} else {lower};
- let mut cap = cap.checked_mul(&mem::size_of::<A>()).unwrap();
- let mut len = 0;
+ let len = self.len();
+ let data_size = len.checked_mul(&mem::size_of::<A>()).unwrap();
+ let size = mem::size_of::<Vec<()>>().checked_add(&data_size).unwrap();
unsafe {
- let mut ptr = alloc(cap) as *mut Vec<A>;
- let mut ret = cast::transmute(ptr);
- for elt in iterator {
- if len * mem::size_of::<A>() >= cap {
- cap = cap.checked_mul(&2).unwrap();
- let ptr2 = alloc(cap) as *mut Vec<A>;
- ptr::copy_nonoverlapping_memory(&mut (*ptr2).data,
- &(*ptr).data,
- len);
- free(ptr as *u8);
- cast::forget(ret);
- ret = cast::transmute(ptr2);
- ptr = ptr2;
- }
-
- let base = &mut (*ptr).data as *mut A;
- intrinsics::move_val_init(&mut *base.offset(len as int), elt);
- len += 1;
- (*ptr).fill = len * mem::nonzero_size_of::<A>();
- }
- ret
+ let ret = alloc(size) as *mut Vec<A>;
+
+ (*ret).fill = len * mem::nonzero_size_of::<A>();
+ (*ret).alloc = len * mem::nonzero_size_of::<A>();
+
+ let mut i = 0;
+ let p = &mut (*ret).data as *mut _ as *mut A;
+ try_finally(
+ &mut i, (),
+ |i, ()| while *i < len {
+ mem::move_val_init(
+ &mut(*p.offset(*i as int)),
+ self.unsafe_ref(*i).clone());
+ *i += 1;
+ },
+ |i| if *i < len {
+ // we must be failing, clean up after ourselves
+ for j in range(0, *i as int) {
+ ptr::read(&*p.offset(j));
+ }
+ free(ret as *u8);
+ });
+ cast::transmute(ret)
}
}
}
-
-#[cfg(not(test))]
-impl<'a,T:Clone, V: Vector<T>> Add<V, ~[T]> for &'a [T] {
- #[inline]
- fn add(&self, rhs: &V) -> ~[T] {
- let first = self.iter().map(|t| t.clone());
- first.chain(rhs.as_slice().iter().map(|t| t.clone())).collect()
- }
-}
-
-#[cfg(not(test))]
-impl<T:Clone, V: Vector<T>> Add<V, ~[T]> for ~[T] {
- #[inline]
- fn add(&self, rhs: &V) -> ~[T] {
- self.as_slice() + rhs.as_slice()
- }
-}
/// 0x0073, 0xDD1E, 0x0069, 0x0063,
/// 0xD834];
///
-/// assert_eq!(str::utf16_items(v).collect::<~[_]>(),
-/// ~[ScalarValue('𝄞'),
-/// ScalarValue('m'), ScalarValue('u'), ScalarValue('s'),
-/// LoneSurrogate(0xDD1E),
-/// ScalarValue('i'), ScalarValue('c'),
-/// LoneSurrogate(0xD834)]);
+/// assert_eq!(str::utf16_items(v).collect::<Vec<_>>(),
+/// vec![ScalarValue('𝄞'),
+/// ScalarValue('m'), ScalarValue('u'), ScalarValue('s'),
+/// LoneSurrogate(0xDD1E),
+/// ScalarValue('i'), ScalarValue('c'),
+/// LoneSurrogate(0xD834)]);
/// ```
pub fn utf16_items<'a>(v: &'a [u16]) -> UTF16Items<'a> {
UTF16Items { iter : v.iter() }
/// # Example
///
/// ```rust
- /// let v: ~[char] = "abc åäö".chars().collect();
- /// assert_eq!(v, ~['a', 'b', 'c', ' ', 'å', 'ä', 'ö']);
+ /// let v: Vec<char> = "abc åäö".chars().collect();
+ /// assert_eq!(v, vec!['a', 'b', 'c', ' ', 'å', 'ä', 'ö']);
/// ```
fn chars(&self) -> Chars<'a>;
/// # Example
///
/// ```rust
- /// let v: ~[&str] = "Mary had a little lamb".split(' ').collect();
- /// assert_eq!(v, ~["Mary", "had", "a", "little", "lamb"]);
+ /// let v: Vec<&str> = "Mary had a little lamb".split(' ').collect();
+ /// assert_eq!(v, vec!["Mary", "had", "a", "little", "lamb"]);
///
- /// let v: ~[&str] = "abc1def2ghi".split(|c: char| c.is_digit()).collect();
- /// assert_eq!(v, ~["abc", "def", "ghi"]);
+ /// let v: Vec<&str> = "abc1def2ghi".split(|c: char| c.is_digit()).collect();
+ /// assert_eq!(v, vec!["abc", "def", "ghi"]);
///
- /// let v: ~[&str] = "lionXXtigerXleopard".split('X').collect();
- /// assert_eq!(v, ~["lion", "", "tiger", "leopard"]);
+ /// let v: Vec<&str> = "lionXXtigerXleopard".split('X').collect();
+ /// assert_eq!(v, vec!["lion", "", "tiger", "leopard"]);
/// ```
fn split<Sep: CharEq>(&self, sep: Sep) -> CharSplits<'a, Sep>;
/// # Example
///
/// ```rust
- /// let v: ~[&str] = "Mary had a little lambda".splitn(' ', 2).collect();
- /// assert_eq!(v, ~["Mary", "had", "a little lambda"]);
+ /// let v: Vec<&str> = "Mary had a little lambda".splitn(' ', 2).collect();
+ /// assert_eq!(v, vec!["Mary", "had", "a little lambda"]);
///
- /// let v: ~[&str] = "abc1def2ghi".splitn(|c: char| c.is_digit(), 1).collect();
- /// assert_eq!(v, ~["abc", "def2ghi"]);
+ /// let v: Vec<&str> = "abc1def2ghi".splitn(|c: char| c.is_digit(), 1).collect();
+ /// assert_eq!(v, vec!["abc", "def2ghi"]);
///
- /// let v: ~[&str] = "lionXXtigerXleopard".splitn('X', 2).collect();
- /// assert_eq!(v, ~["lion", "", "tigerXleopard"]);
+ /// let v: Vec<&str> = "lionXXtigerXleopard".splitn('X', 2).collect();
+ /// assert_eq!(v, vec!["lion", "", "tigerXleopard"]);
/// ```
fn splitn<Sep: CharEq>(&self, sep: Sep, count: uint) -> CharSplitsN<'a, Sep>;
/// # Example
///
/// ```rust
- /// let v: ~[&str] = "A.B.".split_terminator('.').collect();
- /// assert_eq!(v, ~["A", "B"]);
+ /// let v: Vec<&str> = "A.B.".split_terminator('.').collect();
+ /// assert_eq!(v, vec!["A", "B"]);
///
- /// let v: ~[&str] = "A..B..".split_terminator('.').collect();
- /// assert_eq!(v, ~["A", "", "B", ""]);
+ /// let v: Vec<&str> = "A..B..".split_terminator('.').collect();
+ /// assert_eq!(v, vec!["A", "", "B", ""]);
///
- /// let v: ~[&str] = "Mary had a little lamb".split(' ').rev().collect();
- /// assert_eq!(v, ~["lamb", "little", "a", "had", "Mary"]);
+ /// let v: Vec<&str> = "Mary had a little lamb".split(' ').rev().collect();
+ /// assert_eq!(v, vec!["lamb", "little", "a", "had", "Mary"]);
///
- /// let v: ~[&str] = "abc1def2ghi".split(|c: char| c.is_digit()).rev().collect();
- /// assert_eq!(v, ~["ghi", "def", "abc"]);
+ /// let v: Vec<&str> = "abc1def2ghi".split(|c: char| c.is_digit()).rev().collect();
+ /// assert_eq!(v, vec!["ghi", "def", "abc"]);
///
- /// let v: ~[&str] = "lionXXtigerXleopard".split('X').rev().collect();
- /// assert_eq!(v, ~["leopard", "tiger", "", "lion"]);
+ /// let v: Vec<&str> = "lionXXtigerXleopard".split('X').rev().collect();
+ /// assert_eq!(v, vec!["leopard", "tiger", "", "lion"]);
/// ```
fn split_terminator<Sep: CharEq>(&self, sep: Sep) -> CharSplits<'a, Sep>;
/// # Example
///
/// ```rust
- /// let v: ~[&str] = "Mary had a little lamb".rsplitn(' ', 2).collect();
- /// assert_eq!(v, ~["lamb", "little", "Mary had a"]);
+ /// let v: Vec<&str> = "Mary had a little lamb".rsplitn(' ', 2).collect();
+ /// assert_eq!(v, vec!["lamb", "little", "Mary had a"]);
///
- /// let v: ~[&str] = "abc1def2ghi".rsplitn(|c: char| c.is_digit(), 1).collect();
- /// assert_eq!(v, ~["ghi", "abc1def"]);
+ /// let v: Vec<&str> = "abc1def2ghi".rsplitn(|c: char| c.is_digit(), 1).collect();
+ /// assert_eq!(v, vec!["ghi", "abc1def"]);
///
- /// let v: ~[&str] = "lionXXtigerXleopard".rsplitn('X', 2).collect();
- /// assert_eq!(v, ~["leopard", "tiger", "lionX"]);
+ /// let v: Vec<&str> = "lionXXtigerXleopard".rsplitn('X', 2).collect();
+ /// assert_eq!(v, vec!["leopard", "tiger", "lionX"]);
/// ```
fn rsplitn<Sep: CharEq>(&self, sep: Sep, count: uint) -> CharSplitsN<'a, Sep>;
/// # Example
///
/// ```rust
- /// let v: ~[(uint, uint)] = "abcXXXabcYYYabc".match_indices("abc").collect();
- /// assert_eq!(v, ~[(0,3), (6,9), (12,15)]);
+ /// let v: Vec<(uint, uint)> = "abcXXXabcYYYabc".match_indices("abc").collect();
+ /// assert_eq!(v, vec![(0,3), (6,9), (12,15)]);
///
- /// let v: ~[(uint, uint)] = "1abcabc2".match_indices("abc").collect();
- /// assert_eq!(v, ~[(1,4), (4,7)]);
+ /// let v: Vec<(uint, uint)> = "1abcabc2".match_indices("abc").collect();
+ /// assert_eq!(v, vec![(1,4), (4,7)]);
///
- /// let v: ~[(uint, uint)] = "ababa".match_indices("aba").collect();
- /// assert_eq!(v, ~[(0, 3)]); // only the first `aba`
+ /// let v: Vec<(uint, uint)> = "ababa".match_indices("aba").collect();
+ /// assert_eq!(v, vec![(0, 3)]); // only the first `aba`
/// ```
fn match_indices(&self, sep: &'a str) -> MatchIndices<'a>;
/// # Example
///
/// ```rust
- /// let v: ~[&str] = "abcXXXabcYYYabc".split_str("abc").collect();
- /// assert_eq!(v, ~["", "XXX", "YYY", ""]);
+ /// let v: Vec<&str> = "abcXXXabcYYYabc".split_str("abc").collect();
+ /// assert_eq!(v, vec!["", "XXX", "YYY", ""]);
///
- /// let v: ~[&str] = "1abcabc2".split_str("abc").collect();
- /// assert_eq!(v, ~["1", "", "2"]);
+ /// let v: Vec<&str> = "1abcabc2".split_str("abc").collect();
+ /// assert_eq!(v, vec!["1", "", "2"]);
/// ```
fn split_str(&self, &'a str) -> StrSplits<'a>;
///
/// ```rust
/// let four_lines = "foo\nbar\n\nbaz\n";
- /// let v: ~[&str] = four_lines.lines().collect();
- /// assert_eq!(v, ~["foo", "bar", "", "baz"]);
+ /// let v: Vec<&str> = four_lines.lines().collect();
+ /// assert_eq!(v, vec!["foo", "bar", "", "baz"]);
/// ```
fn lines(&self) -> CharSplits<'a, char>;
///
/// ```rust
/// let four_lines = "foo\r\nbar\n\r\nbaz\n";
- /// let v: ~[&str] = four_lines.lines_any().collect();
- /// assert_eq!(v, ~["foo", "bar", "", "baz"]);
+ /// let v: Vec<&str> = four_lines.lines_any().collect();
+ /// assert_eq!(v, vec!["foo", "bar", "", "baz"]);
/// ```
fn lines_any(&self) -> AnyLines<'a>;
///
/// ```rust
/// let some_words = " Mary had\ta little \n\t lamb";
- /// let v: ~[&str] = some_words.words().collect();
- /// assert_eq!(v, ~["Mary", "had", "a", "little", "lamb"]);
+ /// let v: Vec<&str> = some_words.words().collect();
+ /// assert_eq!(v, vec!["Mary", "had", "a", "little", "lamb"]);
/// ```
fn words(&self) -> Words<'a>;
///
/// ```rust
/// let string = "a\nb\nc";
- /// let lines: ~[&str] = string.lines().collect();
+ /// let lines: Vec<&str> = string.lines().collect();
+ /// let lines = lines.as_slice();
///
/// assert!(string.subslice_offset(lines[0]) == 0); // &"a"
/// assert!(string.subslice_offset(lines[1]) == 2); // &"b"
--- /dev/null
+// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Macro support for format strings
+//!
+//! These structures are used when parsing format strings for the compiler.
+//! Parsing does not happen at runtime: structures of `std::fmt::rt` are
+//! generated instead.
+
+#![crate_id = "fmt_macros#0.11-pre"]
+#![license = "MIT/ASL2"]
+#![crate_type = "rlib"]
+#![crate_type = "dylib"]
+#![feature(macro_rules, globs)]
+#![experimental]
+
+use std::char;
+use std::str;
+
+/// A piece is a portion of the format string which represents the next part
+/// to emit. These are emitted as a stream by the `Parser` class.
+#[deriving(Eq)]
+pub enum Piece<'a> {
+    /// A literal string which should directly be emitted
+    String(&'a str),
+    /// A back-reference to whatever the current argument is. This is used
+    /// inside of a method call to refer back to the original argument.
+    /// Written as `#` in the format string.
+    CurrentArgument,
+    /// This describes that formatting should process the next argument (as
+    /// specified inside) for emission. Parsed from a `{...}` group.
+    Argument(Argument<'a>),
+}
+
+/// Representation of an argument specification.
+///
+/// Parsed from the text between a `{` and its matching `}`.
+#[deriving(Eq)]
+pub struct Argument<'a> {
+    /// Where to find this argument
+    pub position: Position<'a>,
+    /// How to format the argument (everything after the `:`)
+    pub format: FormatSpec<'a>,
+    /// If not `None`, what method to invoke on the argument
+    /// (a `plural` or `select` clause introduced by a comma)
+    pub method: Option<Box<Method<'a>>>
+}
+
+/// Specification for the formatting of an argument in the format string.
+#[deriving(Eq)]
+pub struct FormatSpec<'a> {
+    /// Optionally specified character to fill alignment with
+    pub fill: Option<char>,
+    /// Optionally specified alignment
+    pub align: Alignment,
+    /// Packed version of various flags provided; a bit set indexed by the
+    /// discriminants of the `Flag` enum (see `1 << (Flag as uint)` usage)
+    pub flags: uint,
+    /// The integer precision to use
+    pub precision: Count<'a>,
+    /// The string width requested for the resulting format
+    pub width: Count<'a>,
+    /// The descriptor string representing the name of the format desired for
+    /// this argument, this can be empty or any number of characters, although
+    /// it is required to be one word.
+    pub ty: &'a str
+}
+
+/// Enum describing where an argument for a format can be located.
+#[deriving(Eq)]
+pub enum Position<'a> {
+    /// The argument will be in the next position. This is the default when
+    /// no explicit index or name is given before the `:`.
+    ArgumentNext,
+    /// The argument is located at a specific index.
+    ArgumentIs(uint),
+    /// The argument has a name.
+    ArgumentNamed(&'a str),
+}
+
+/// Enum of alignments which are supported. Parsed from a `<` (left) or `>`
+/// (right) in the format spec.
+#[deriving(Eq)]
+pub enum Alignment {
+    /// The value will be aligned to the left.
+    AlignLeft,
+    /// The value will be aligned to the right.
+    AlignRight,
+    /// The value will take on a default alignment.
+    AlignUnknown,
+}
+
+/// Various flags which can be applied to format strings. The meaning of these
+/// flags is defined by the formatters themselves.
+///
+/// These discriminants are used as bit positions inside `FormatSpec::flags`.
+#[deriving(Eq)]
+pub enum Flag {
+    /// A `+` will be used to denote positive numbers.
+    FlagSignPlus,
+    /// A `-` will be used to denote negative numbers. This is the default.
+    FlagSignMinus,
+    /// An alternate form will be used for the value. In the case of numbers,
+    /// this means that the number will be prefixed with the supplied string.
+    FlagAlternate,
+    /// For numbers, this means that the number will be padded with zeroes,
+    /// and the sign (`+` or `-`) will precede them.
+    FlagSignAwareZeroPad,
+}
+
+/// A count is used for the precision and width parameters of an integer, and
+/// can reference either an argument or a literal integer.
+#[deriving(Eq)]
+pub enum Count<'a> {
+    /// The count is specified explicitly.
+    CountIs(uint),
+    /// The count is specified by the argument with the given name
+    /// (written `name$`).
+    CountIsName(&'a str),
+    /// The count is specified by the argument at the given index
+    /// (written `N$`).
+    CountIsParam(uint),
+    /// The count is specified by the next parameter (written `.*`,
+    /// precision only).
+    CountIsNextParam,
+    /// The count is implied and cannot be explicitly specified.
+    CountImplied,
+}
+
+/// Enum describing all of the possible methods which the formatting language
+/// currently supports.
+#[deriving(Eq)]
+pub enum Method<'a> {
+    /// A plural method selects on an integer over a list of either integer or
+    /// keyword-defined clauses. The meaning of the keywords is defined by the
+    /// current locale.
+    ///
+    /// An offset is optionally present at the beginning which is used to
+    /// match against keywords, but it is not matched against the literal
+    /// integers.
+    ///
+    /// The final element of this enum is the default "other" case which is
+    /// always required to be specified.
+    Plural(Option<uint>, Vec<PluralArm<'a>>, Vec<Piece<'a>>),
+
+    /// A select method selects over a string. Each arm is a different string
+    /// which can be selected for.
+    ///
+    /// As with `Plural`, a default "other" case is required as well.
+    Select(Vec<SelectArm<'a>>, Vec<Piece<'a>>),
+}
+
+/// A selector for what pluralization a plural method should take
+#[deriving(Eq, TotalEq, Hash)]
+pub enum PluralSelector {
+    /// One of the plural keywords should be used
+    Keyword(PluralKeyword),
+    /// A literal pluralization should be used (written `=N` in the
+    /// format string)
+    Literal(uint),
+}
+
+/// Structure representing one "arm" of the `plural` function.
+#[deriving(Eq)]
+pub struct PluralArm<'a> {
+    /// A selector can either be specified by a keyword or with an integer
+    /// literal.
+    pub selector: PluralSelector,
+    /// Array of pieces which are the format of this arm (the contents of
+    /// the arm's `{...}` block)
+    pub result: Vec<Piece<'a>>,
+}
+
+/// Enum of the 5 CLDR plural keywords. There is one more, "other", but that
+/// is specially placed in the `Plural` variant of `Method`.
+///
+/// Which keyword applies to a given integer is locale-defined; this crate
+/// only parses the keywords, it does not interpret them.
+///
+/// http://www.icu-project.org/apiref/icu4c/classicu_1_1PluralRules.html
+#[deriving(Eq, TotalEq, Hash, Show)]
+#[allow(missing_doc)]
+pub enum PluralKeyword {
+    /// The plural form for zero objects.
+    Zero,
+    /// The plural form for one object.
+    One,
+    /// The plural form for two objects.
+    Two,
+    /// The plural form for few objects.
+    Few,
+    /// The plural form for many objects.
+    Many,
+}
+
+/// Structure representing one "arm" of the `select` function.
+#[deriving(Eq)]
+pub struct SelectArm<'a> {
+    /// String selector which guards this arm
+    pub selector: &'a str,
+    /// Array of pieces which are the format of this arm (the contents of
+    /// the arm's `{...}` block)
+    pub result: Vec<Piece<'a>>,
+}
+
+/// The parser structure for interpreting the input format string. This is
+/// modelled as an iterator over `Piece` structures to form a stream of tokens
+/// being output.
+///
+/// This is a recursive-descent parser for the sake of simplicity, and if
+/// necessary there's probably lots of room for improvement performance-wise.
+pub struct Parser<'a> {
+    // The full format string being parsed; slices of pieces point into it.
+    input: &'a str,
+    // Cursor over (byte offset, char) pairs; cloned for lookahead so that
+    // peeking never consumes a character.
+    cur: str::CharOffsets<'a>,
+    // Current `{`-nesting depth; a `}` at depth 0 is an error.
+    depth: uint,
+    /// Error messages accumulated during parsing
+    pub errors: Vec<~str>,
+}
+
+// Top-level tokenization: yields `Piece`s until the input is exhausted or an
+// (unescaped) `}` closes the current nesting level.
+impl<'a> Iterator<Piece<'a>> for Parser<'a> {
+    fn next(&mut self) -> Option<Piece<'a>> {
+        // Peek via a cloned iterator; only consume once the character's
+        // meaning is known.
+        match self.cur.clone().next() {
+            Some((_, '#')) => { self.cur.next(); Some(CurrentArgument) }
+            Some((_, '{')) => {
+                self.cur.next();
+                let ret = Some(Argument(self.argument()));
+                self.must_consume('}');
+                ret
+            }
+            Some((pos, '\\')) => {
+                self.cur.next();
+                self.escape(); // ensure it's a valid escape sequence
+                Some(String(self.string(pos + 1))) // skip the '\' character
+            }
+            Some((_, '}')) if self.depth == 0 => {
+                self.cur.next();
+                self.err("unmatched `}` found");
+                None
+            }
+            // A `}` at depth > 0 terminates this sub-stream; the enclosing
+            // `select`/`plural` arm will consume it.
+            Some((_, '}')) | None => { None }
+            Some((pos, _)) => {
+                Some(String(self.string(pos)))
+            }
+        }
+    }
+}
+
+// Recursive-descent parsing methods. Fixes two defects in the error
+// reporting: `select()` emitted a message with an unbalanced backtick, and
+// `plural()` reported duplicate `other` arms as belonging to `select`.
+impl<'a> Parser<'a> {
+    /// Creates a new parser for the given format string
+    pub fn new<'a>(s: &'a str) -> Parser<'a> {
+        Parser {
+            input: s,
+            cur: s.char_indices(),
+            depth: 0,
+            errors: vec!(),
+        }
+    }
+
+    /// Notifies of an error. The message doesn't actually need to be of type
+    /// ~str, but I think it does when this eventually uses conditions so it
+    /// might as well start using it now.
+    fn err(&mut self, msg: &str) {
+        self.errors.push(msg.to_owned());
+    }
+
+    /// Optionally consumes the specified character. If the character is not at
+    /// the current position, then the current iterator isn't moved and false is
+    /// returned, otherwise the character is consumed and true is returned.
+    fn consume(&mut self, c: char) -> bool {
+        match self.cur.clone().next() {
+            Some((_, maybe)) if c == maybe => {
+                self.cur.next();
+                true
+            }
+            Some(..) | None => false,
+        }
+    }
+
+    /// Forces consumption of the specified character. If the character is not
+    /// found, an error is emitted.
+    fn must_consume(&mut self, c: char) {
+        self.ws();
+        match self.cur.clone().next() {
+            Some((_, maybe)) if c == maybe => {
+                self.cur.next();
+            }
+            Some((_, other)) => {
+                self.err(
+                    format!("expected `{}` but found `{}`", c, other));
+            }
+            None => {
+                self.err(
+                    format!("expected `{}` but string was terminated", c));
+            }
+        }
+    }
+
+    /// Attempts to consume any amount of whitespace followed by a character
+    fn wsconsume(&mut self, c: char) -> bool {
+        self.ws(); self.consume(c)
+    }
+
+    /// Consumes all whitespace characters until the first non-whitespace
+    /// character
+    fn ws(&mut self) {
+        loop {
+            match self.cur.clone().next() {
+                Some((_, c)) if char::is_whitespace(c) => { self.cur.next(); }
+                Some(..) | None => { return }
+            }
+        }
+    }
+
+    /// Consumes an escape sequence, failing if there is not a valid character
+    /// to be escaped.
+    fn escape(&mut self) -> char {
+        match self.cur.next() {
+            Some((_, c @ '#')) | Some((_, c @ '{')) |
+            Some((_, c @ '\\')) | Some((_, c @ '}')) => { c }
+            Some((_, c)) => {
+                self.err(format!("invalid escape character `{}`", c));
+                c
+            }
+            None => {
+                self.err("expected an escape sequence, but format string was \
+                          terminated");
+                ' '
+            }
+        }
+    }
+
+    /// Parses all of a string which is to be considered a "raw literal" in a
+    /// format string. This is everything outside of the braces.
+    fn string(&mut self, start: uint) -> &'a str {
+        loop {
+            // we may not consume the character, so clone the iterator
+            match self.cur.clone().next() {
+                Some((pos, '\\')) | Some((pos, '#')) |
+                Some((pos, '}')) | Some((pos, '{')) => {
+                    return self.input.slice(start, pos);
+                }
+                Some(..) => { self.cur.next(); }
+                None => {
+                    self.cur.next();
+                    return self.input.slice(start, self.input.len());
+                }
+            }
+        }
+    }
+
+    /// Parses an Argument structure, or what's contained within braces inside
+    /// the format string
+    fn argument(&mut self) -> Argument<'a> {
+        Argument {
+            position: self.position(),
+            format: self.format(),
+            method: self.method(),
+        }
+    }
+
+    /// Parses a positional argument for a format. This could either be an
+    /// integer index of an argument, a named argument, or a blank string.
+    fn position(&mut self) -> Position<'a> {
+        match self.integer() {
+            Some(i) => { ArgumentIs(i) }
+            None => {
+                match self.cur.clone().next() {
+                    Some((_, c)) if char::is_alphabetic(c) => {
+                        ArgumentNamed(self.word())
+                    }
+                    _ => ArgumentNext
+                }
+            }
+        }
+    }
+
+    /// Parses a format specifier at the current position, returning all of the
+    /// relevant information in the FormatSpec struct.
+    fn format(&mut self) -> FormatSpec<'a> {
+        let mut spec = FormatSpec {
+            fill: None,
+            align: AlignUnknown,
+            flags: 0,
+            precision: CountImplied,
+            width: CountImplied,
+            ty: self.input.slice(0, 0),
+        };
+        if !self.consume(':') { return spec }
+
+        // fill character; only counts as a fill if followed by an alignment
+        match self.cur.clone().next() {
+            Some((_, c)) => {
+                match self.cur.clone().skip(1).next() {
+                    Some((_, '>')) | Some((_, '<')) => {
+                        spec.fill = Some(c);
+                        self.cur.next();
+                    }
+                    Some(..) | None => {}
+                }
+            }
+            None => {}
+        }
+        // Alignment
+        if self.consume('<') {
+            spec.align = AlignLeft;
+        } else if self.consume('>') {
+            spec.align = AlignRight;
+        }
+        // Sign flags
+        if self.consume('+') {
+            spec.flags |= 1 << (FlagSignPlus as uint);
+        } else if self.consume('-') {
+            spec.flags |= 1 << (FlagSignMinus as uint);
+        }
+        // Alternate marker
+        if self.consume('#') {
+            spec.flags |= 1 << (FlagAlternate as uint);
+        }
+        // Width and precision
+        let mut havewidth = false;
+        if self.consume('0') {
+            // small ambiguity with '0$' as a format string. In theory this is a
+            // '0' flag and then an ill-formatted format string with just a '$'
+            // and no count, but this is better if we instead interpret this as
+            // no '0' flag and '0$' as the width instead.
+            if self.consume('$') {
+                spec.width = CountIsParam(0);
+                havewidth = true;
+            } else {
+                spec.flags |= 1 << (FlagSignAwareZeroPad as uint);
+            }
+        }
+        if !havewidth {
+            spec.width = self.count();
+        }
+        if self.consume('.') {
+            if self.consume('*') {
+                spec.precision = CountIsNextParam;
+            } else {
+                spec.precision = self.count();
+            }
+        }
+        // Finally the actual format specifier
+        if self.consume('?') {
+            spec.ty = "?";
+        } else {
+            spec.ty = self.word();
+        }
+        return spec;
+    }
+
+    /// Parses a method to be applied to the previously specified argument and
+    /// its format. The two current supported methods are 'plural' and 'select'
+    fn method(&mut self) -> Option<Box<Method<'a>>> {
+        if !self.wsconsume(',') {
+            return None;
+        }
+        self.ws();
+        match self.word() {
+            "select" => {
+                self.must_consume(',');
+                Some(self.select())
+            }
+            "plural" => {
+                self.must_consume(',');
+                Some(self.plural())
+            }
+            "" => {
+                self.err("expected method after comma");
+                return None;
+            }
+            method => {
+                self.err(format!("unknown method: `{}`", method));
+                return None;
+            }
+        }
+    }
+
+    /// Parses a 'select' statement (after the initial 'select' word)
+    fn select(&mut self) -> Box<Method<'a>> {
+        let mut other = None;
+        let mut arms = vec!();
+        // Consume arms one at a time
+        loop {
+            self.ws();
+            let selector = self.word();
+            if selector == "" {
+                self.err("cannot have an empty selector");
+                break
+            }
+            self.must_consume('{');
+            self.depth += 1;
+            let pieces = self.collect();
+            self.depth -= 1;
+            self.must_consume('}');
+            if selector == "other" {
+                if !other.is_none() {
+                    // fixed: message previously had an unbalanced backtick
+                    self.err("multiple `other` statements in `select`");
+                }
+                other = Some(pieces);
+            } else {
+                arms.push(SelectArm { selector: selector, result: pieces });
+            }
+            self.ws();
+            match self.cur.clone().next() {
+                Some((_, '}')) => { break }
+                Some(..) | None => {}
+            }
+        }
+        // The "other" selector must be present
+        let other = match other {
+            Some(arm) => { arm }
+            None => {
+                self.err("`select` statement must provide an `other` case");
+                vec!()
+            }
+        };
+        box Select(arms, other)
+    }
+
+    /// Parses a 'plural' statement (after the initial 'plural' word)
+    fn plural(&mut self) -> Box<Method<'a>> {
+        let mut offset = None;
+        let mut other = None;
+        let mut arms = vec!();
+
+        // First, attempt to parse the 'offset:' field. We know the set of
+        // selector words which can appear in plural arms, and the only ones
+        // which start with 'o' are "other" and "offset", hence look two
+        // characters deep to see if we can consume the word "offset"
+        self.ws();
+        let mut it = self.cur.clone();
+        match it.next() {
+            Some((_, 'o')) => {
+                match it.next() {
+                    Some((_, 'f')) => {
+                        let word = self.word();
+                        if word != "offset" {
+                            self.err(format!("expected `offset`, found `{}`",
+                                             word));
+                        } else {
+                            self.must_consume(':');
+                            match self.integer() {
+                                Some(i) => { offset = Some(i); }
+                                None => {
+                                    self.err("offset must be an integer");
+                                }
+                            }
+                        }
+                    }
+                    Some(..) | None => {}
+                }
+            }
+            Some(..) | None => {}
+        }
+
+        // Next, generate all the arms
+        loop {
+            let mut isother = false;
+            let selector = if self.wsconsume('=') {
+                match self.integer() {
+                    Some(i) => Literal(i),
+                    None => {
+                        self.err("plural `=` selectors must be followed by an \
+                                  integer");
+                        Literal(0)
+                    }
+                }
+            } else {
+                let word = self.word();
+                match word {
+                    "other" => { isother = true; Keyword(Zero) }
+                    "zero"  => Keyword(Zero),
+                    "one"   => Keyword(One),
+                    "two"   => Keyword(Two),
+                    "few"   => Keyword(Few),
+                    "many"  => Keyword(Many),
+                    word    => {
+                        self.err(format!("unexpected plural selector `{}`",
+                                         word));
+                        if word == "" {
+                            break
+                        } else {
+                            Keyword(Zero)
+                        }
+                    }
+                }
+            };
+            self.must_consume('{');
+            self.depth += 1;
+            let pieces = self.collect();
+            self.depth -= 1;
+            self.must_consume('}');
+            if isother {
+                if !other.is_none() {
+                    // fixed: previously said `select` inside a `plural`
+                    // method, and was missing the closing backtick
+                    self.err("multiple `other` statements in `plural`");
+                }
+                other = Some(pieces);
+            } else {
+                arms.push(PluralArm { selector: selector, result: pieces });
+            }
+            self.ws();
+            match self.cur.clone().next() {
+                Some((_, '}')) => { break }
+                Some(..) | None => {}
+            }
+        }
+
+        let other = match other {
+            Some(arm) => { arm }
+            None => {
+                self.err("`plural` statement must provide an `other` case");
+                vec!()
+            }
+        };
+        box Plural(offset, arms, other)
+    }
+
+    /// Parses a Count parameter at the current position. This does not check
+    /// for 'CountIsNextParam' because that is only used in precision, not
+    /// width.
+    fn count(&mut self) -> Count<'a> {
+        match self.integer() {
+            Some(i) => {
+                if self.consume('$') {
+                    CountIsParam(i)
+                } else {
+                    CountIs(i)
+                }
+            }
+            None => {
+                // The guard consumes a '$' as a side effect; if a word is
+                // present but not followed by '$', rewind to `tmp` so the
+                // word is re-parsed as the format type instead.
+                let tmp = self.cur.clone();
+                match self.word() {
+                    word if word.len() > 0 && self.consume('$') => {
+                        CountIsName(word)
+                    }
+                    _ => {
+                        self.cur = tmp;
+                        CountImplied
+                    }
+                }
+            }
+        }
+    }
+
+    /// Parses a word starting at the current position. A word is considered to
+    /// be an alphabetic character followed by any number of alphanumeric
+    /// characters.
+    fn word(&mut self) -> &'a str {
+        let start = match self.cur.clone().next() {
+            Some((pos, c)) if char::is_XID_start(c) => {
+                self.cur.next();
+                pos
+            }
+            Some(..) | None => { return self.input.slice(0, 0); }
+        };
+        let mut end;
+        loop {
+            match self.cur.clone().next() {
+                Some((_, c)) if char::is_XID_continue(c) => {
+                    self.cur.next();
+                }
+                Some((pos, _)) => { end = pos; break }
+                None => { end = self.input.len(); break }
+            }
+        }
+        self.input.slice(start, end)
+    }
+
+    /// Optionally parses an integer at the current position. This doesn't deal
+    /// with overflow at all, it's just accumulating digits.
+    fn integer(&mut self) -> Option<uint> {
+        let mut cur = 0;
+        let mut found = false;
+        loop {
+            match self.cur.clone().next() {
+                Some((_, c)) => {
+                    match char::to_digit(c, 10) {
+                        Some(i) => {
+                            cur = cur * 10 + i;
+                            found = true;
+                            self.cur.next();
+                        }
+                        None => { break }
+                    }
+                }
+                None => { break }
+            }
+        }
+        if found {
+            return Some(cur);
+        } else {
+            return None;
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // Asserts that parsing `fmt` yields exactly the stream of pieces `p`.
+    fn same(fmt: &'static str, p: &[Piece<'static>]) {
+        let mut parser = Parser::new(fmt);
+        assert!(p == parser.collect::<Vec<Piece<'static>>>().as_slice());
+    }
+
+    // A FormatSpec with every field at its default, for brevity in tests.
+    fn fmtdflt() -> FormatSpec<'static> {
+        return FormatSpec {
+            fill: None,
+            align: AlignUnknown,
+            flags: 0,
+            precision: CountImplied,
+            width: CountImplied,
+            ty: "",
+        }
+    }
+
+    // Asserts that parsing `s` produces at least one error.
+    fn musterr(s: &str) {
+        let mut p = Parser::new(s);
+        p.next();
+        assert!(p.errors.len() != 0);
+    }
+
+    // Literal text and `\`-escape handling.
+    #[test]
+    fn simple() {
+        same("asdf", [String("asdf")]);
+        same("a\\{b", [String("a"), String("{b")]);
+        same("a\\#b", [String("a"), String("#b")]);
+        same("a\\}b", [String("a"), String("}b")]);
+        same("a\\}", [String("a"), String("}")]);
+        same("\\}", [String("}")]);
+    }
+
+    // Malformed inputs must produce errors, not panics.
+    #[test] fn invalid01() { musterr("{") }
+    #[test] fn invalid02() { musterr("\\") }
+    #[test] fn invalid03() { musterr("\\a") }
+    #[test] fn invalid04() { musterr("{3a}") }
+    #[test] fn invalid05() { musterr("{:|}") }
+    #[test] fn invalid06() { musterr("{:>>>}") }
+
+    // Argument position parsing.
+    #[test]
+    fn format_nothing() {
+        same("{}", [Argument(Argument {
+            position: ArgumentNext,
+            format: fmtdflt(),
+            method: None,
+        })]);
+    }
+    #[test]
+    fn format_position() {
+        same("{3}", [Argument(Argument {
+            position: ArgumentIs(3),
+            format: fmtdflt(),
+            method: None,
+        })]);
+    }
+    #[test]
+    fn format_position_nothing_else() {
+        same("{3:}", [Argument(Argument {
+            position: ArgumentIs(3),
+            format: fmtdflt(),
+            method: None,
+        })]);
+    }
+    #[test]
+    fn format_type() {
+        same("{3:a}", [Argument(Argument {
+            position: ArgumentIs(3),
+            format: FormatSpec {
+                fill: None,
+                align: AlignUnknown,
+                flags: 0,
+                precision: CountImplied,
+                width: CountImplied,
+                ty: "a",
+            },
+            method: None,
+        })]);
+    }
+    // Fill characters and `<`/`>` alignment.
+    #[test]
+    fn format_align_fill() {
+        same("{3:>}", [Argument(Argument {
+            position: ArgumentIs(3),
+            format: FormatSpec {
+                fill: None,
+                align: AlignRight,
+                flags: 0,
+                precision: CountImplied,
+                width: CountImplied,
+                ty: "",
+            },
+            method: None,
+        })]);
+        same("{3:0<}", [Argument(Argument {
+            position: ArgumentIs(3),
+            format: FormatSpec {
+                fill: Some('0'),
+                align: AlignLeft,
+                flags: 0,
+                precision: CountImplied,
+                width: CountImplied,
+                ty: "",
+            },
+            method: None,
+        })]);
+        same("{3:*<abcd}", [Argument(Argument {
+            position: ArgumentIs(3),
+            format: FormatSpec {
+                fill: Some('*'),
+                align: AlignLeft,
+                flags: 0,
+                precision: CountImplied,
+                width: CountImplied,
+                ty: "abcd",
+            },
+            method: None,
+        })]);
+    }
+    // Width/precision counts in all their forms (literal, `N$`, `.*`, `name$`).
+    #[test]
+    fn format_counts() {
+        same("{:10s}", [Argument(Argument {
+            position: ArgumentNext,
+            format: FormatSpec {
+                fill: None,
+                align: AlignUnknown,
+                flags: 0,
+                precision: CountImplied,
+                width: CountIs(10),
+                ty: "s",
+            },
+            method: None,
+        })]);
+        same("{:10$.10s}", [Argument(Argument {
+            position: ArgumentNext,
+            format: FormatSpec {
+                fill: None,
+                align: AlignUnknown,
+                flags: 0,
+                precision: CountIs(10),
+                width: CountIsParam(10),
+                ty: "s",
+            },
+            method: None,
+        })]);
+        same("{:.*s}", [Argument(Argument {
+            position: ArgumentNext,
+            format: FormatSpec {
+                fill: None,
+                align: AlignUnknown,
+                flags: 0,
+                precision: CountIsNextParam,
+                width: CountImplied,
+                ty: "s",
+            },
+            method: None,
+        })]);
+        same("{:.10$s}", [Argument(Argument {
+            position: ArgumentNext,
+            format: FormatSpec {
+                fill: None,
+                align: AlignUnknown,
+                flags: 0,
+                precision: CountIsParam(10),
+                width: CountImplied,
+                ty: "s",
+            },
+            method: None,
+        })]);
+        same("{:a$.b$s}", [Argument(Argument {
+            position: ArgumentNext,
+            format: FormatSpec {
+                fill: None,
+                align: AlignUnknown,
+                flags: 0,
+                precision: CountIsName("b"),
+                width: CountIsName("a"),
+                ty: "s",
+            },
+            method: None,
+        })]);
+    }
+    // `+`, `-`, `#` flag bits.
+    #[test]
+    fn format_flags() {
+        same("{:-}", [Argument(Argument {
+            position: ArgumentNext,
+            format: FormatSpec {
+                fill: None,
+                align: AlignUnknown,
+                flags: (1 << FlagSignMinus as uint),
+                precision: CountImplied,
+                width: CountImplied,
+                ty: "",
+            },
+            method: None,
+        })]);
+        same("{:+#}", [Argument(Argument {
+            position: ArgumentNext,
+            format: FormatSpec {
+                fill: None,
+                align: AlignUnknown,
+                flags: (1 << FlagSignPlus as uint) | (1 << FlagAlternate as uint),
+                precision: CountImplied,
+                width: CountImplied,
+                ty: "",
+            },
+            method: None,
+        })]);
+    }
+    #[test]
+    fn format_mixture() {
+        same("abcd {3:a} efg", [String("abcd "), Argument(Argument {
+            position: ArgumentIs(3),
+            format: FormatSpec {
+                fill: None,
+                align: AlignUnknown,
+                flags: 0,
+                precision: CountImplied,
+                width: CountImplied,
+                ty: "a",
+            },
+            method: None,
+        }), String(" efg")]);
+    }
+
+    // `select` method parsing, including nesting and `#` back-references.
+    #[test]
+    fn select_simple() {
+        same("{, select, other { haha } }", [Argument(Argument{
+            position: ArgumentNext,
+            format: fmtdflt(),
+            method: Some(box Select(vec![], vec![String(" haha ")]))
+        })]);
+        same("{1, select, other { haha } }", [Argument(Argument{
+            position: ArgumentIs(1),
+            format: fmtdflt(),
+            method: Some(box Select(vec![], vec![String(" haha ")]))
+        })]);
+        same("{1, select, other {#} }", [Argument(Argument{
+            position: ArgumentIs(1),
+            format: fmtdflt(),
+            method: Some(box Select(vec![], vec![CurrentArgument]))
+        })]);
+        same("{1, select, other {{2, select, other {lol}}} }", [Argument(Argument{
+            position: ArgumentIs(1),
+            format: fmtdflt(),
+            method: Some(box Select(vec![], vec![Argument(Argument{
+                position: ArgumentIs(2),
+                format: fmtdflt(),
+                method: Some(box Select(vec![], vec![String("lol")]))
+            })])) // wat
+        })]);
+    }
+
+    #[test]
+    fn select_cases() {
+        same("{1, select, a{1} b{2} c{3} other{4} }", [Argument(Argument{
+            position: ArgumentIs(1),
+            format: fmtdflt(),
+            method: Some(box Select(vec![
+                SelectArm{ selector: "a", result: vec![String("1")] },
+                SelectArm{ selector: "b", result: vec![String("2")] },
+                SelectArm{ selector: "c", result: vec![String("3")] },
+            ], vec![String("4")]))
+        })]);
+    }
+
+    // Malformed `select` statements must error, not panic.
+    #[test] fn badselect01() { musterr("{select, }") }
+    #[test] fn badselect02() { musterr("{1, select}") }
+    #[test] fn badselect03() { musterr("{1, select, }") }
+    #[test] fn badselect04() { musterr("{1, select, a {}}") }
+    #[test] fn badselect05() { musterr("{1, select, other }}") }
+    #[test] fn badselect06() { musterr("{1, select, other {}") }
+    #[test] fn badselect07() { musterr("{select, other {}") }
+    #[test] fn badselect08() { musterr("{1 select, other {}") }
+    #[test] fn badselect09() { musterr("{:d select, other {}") }
+    #[test] fn badselect10() { musterr("{1:d select, other {}") }
+
+    // `plural` method parsing: offset, `=N` literals, and keyword arms.
+    #[test]
+    fn plural_simple() {
+        same("{, plural, other { haha } }", [Argument(Argument{
+            position: ArgumentNext,
+            format: fmtdflt(),
+            method: Some(box Plural(None, vec![], vec![String(" haha ")]))
+        })]);
+        same("{:, plural, other { haha } }", [Argument(Argument{
+            position: ArgumentNext,
+            format: fmtdflt(),
+            method: Some(box Plural(None, vec![], vec![String(" haha ")]))
+        })]);
+        same("{, plural, offset:1 =2{2} =3{3} many{yes} other{haha} }",
+             [Argument(Argument{
+            position: ArgumentNext,
+            format: fmtdflt(),
+            method: Some(box Plural(Some(1), vec![
+                PluralArm{ selector: Literal(2), result: vec![String("2")] },
+                PluralArm{ selector: Literal(3), result: vec![String("3")] },
+                PluralArm{ selector: Keyword(Many), result: vec![String("yes")] }
+            ], vec![String("haha")]))
+        })]);
+    }
+}
//! fn main() {
//! let args = os::args();
//!
-//! let program = args[0].clone();
+//! let program = args.get(0).clone();
//!
//! let opts = [
//! optopt("o", "", "set output file name", "NAME"),
impl GetAddrInfoRequest {
pub fn run(host: Option<&str>, servname: Option<&str>,
- hint: Option<ai::Hint>) -> Result<~[ai::Info], IoError> {
+ hint: Option<ai::Hint>) -> Result<Vec<ai::Info>, IoError> {
assert!(host.is_some() || servname.is_some());
let c_host = host.map_or(unsafe { CString::new(null(), true) }, |x| x.to_c_str());
unsafe { freeaddrinfo(res); }
- Ok(addrs.move_iter().collect())
+ Ok(addrs)
}
}
use std::rt::rtio;
use std::str;
use std::sync::arc::UnsafeArc;
-use std::slice;
+use std::vec;
use io::IoResult;
if fp_buf as uint == 0 {
fail!("os::list_dir() failure: got null ptr from wfd");
} else {
- let fp_vec = slice::from_buf(fp_buf, libc::wcslen(fp_buf) as uint);
- let fp_trimmed = str::truncate_utf16_at_nul(fp_vec);
+ let fp_vec = vec::raw::from_buf(fp_buf, libc::wcslen(fp_buf) as uint);
+ let fp_trimmed = str::truncate_utf16_at_nul(fp_vec.as_slice());
let fp_str = str::from_utf16(fp_trimmed)
.expect("rust_list_dir_wfd_fp_buf returned invalid UTF-16");
paths.push(Path::new(fp_str));
})
}
fn get_host_addresses(&mut self, host: Option<&str>, servname: Option<&str>,
- hint: Option<ai::Hint>) -> IoResult<~[ai::Info]> {
+ hint: Option<ai::Hint>) -> IoResult<Vec<ai::Info>> {
addrinfo::GetAddrInfoRequest::run(host, servname, hint)
}
}
fn spawn(&mut self, config: ProcessConfig)
-> IoResult<(Box<RtioProcess:Send>,
- ~[Option<Box<RtioPipe:Send>>])> {
+ Vec<Option<Box<RtioPipe:Send>>>)> {
process::Process::spawn(config).map(|(p, io)| {
(box p as Box<RtioProcess:Send>,
io.move_iter().map(|p| p.map(|p| {
/// os pipe instead. This process takes ownership of these file
/// descriptors, closing them upon destruction of the process.
pub fn spawn(config: p::ProcessConfig)
- -> Result<(Process, ~[Option<file::FileDesc>]), io::IoError>
+ -> Result<(Process, Vec<Option<file::FileDesc>>), io::IoError>
{
// right now we only handle stdin/stdout/stderr.
if config.extra_io.len() > 0 {
exit_code: None,
exit_signal: None,
},
- ret_io.move_iter().collect()))
+ ret_io))
}
Err(e) => Err(e)
}
/**
A big unsigned integer type.
-A `BigUint`-typed value `BigUint { data: ~[a, b, c] }` represents a number
+A `BigUint`-typed value `BigUint { data: vec!(a, b, c) }` represents a number
`(a + b * BigDigit::base + c * BigDigit::base^2)`.
*/
#[deriving(Clone)]
// the crate id in the hash because lookups are only done by (name/vers),
// not by path.
let mut s = Sha256::new();
- s.input_str(crate_id.short_name_with_version());
+ s.input_str(crate_id.short_name_with_version().as_slice());
truncated_hash_result(&mut s).slice_to(8).to_owned()
}
// to be independent of one another in the crate.
symbol_hasher.reset();
- symbol_hasher.input_str(link_meta.crateid.name);
+ symbol_hasher.input_str(link_meta.crateid.name.as_slice());
symbol_hasher.input_str("-");
symbol_hasher.input_str(link_meta.crate_hash.as_str());
symbol_hasher.input_str("-");
let libs = sess.cstore.get_used_crates(cstore::RequireDynamic);
let libs = libs.move_iter().filter_map(|(_, l)| {
l.map(|p| p.clone())
- }).collect::<~[_]>();
+ }).collect::<Vec<_>>();
- let rpaths = get_rpaths(os, sysroot, output, libs,
+ let rpaths = get_rpaths(os, sysroot, output, libs.as_slice(),
sess.opts.target_triple);
flags.push_all(rpaths_to_flags(rpaths.as_slice()).as_slice());
flags
fn parse_cfgspecs(cfgspecs: Vec<~str> )
-> ast::CrateConfig {
cfgspecs.move_iter().map(|s| {
- parse::parse_meta_from_source_str("cfgspec".to_str(),
- s,
+ parse::parse_meta_from_source_str("cfgspec".to_strbuf(),
+ s.to_strbuf(),
Vec::new(),
&parse::new_parse_sess())
}).collect::<ast::CrateConfig>()
parse::parse_crate_from_file(&(*file), cfg.clone(), &sess.parse_sess)
}
StrInput(ref src) => {
- parse::parse_crate_from_source_str(anon_src(),
- (*src).clone(),
+ parse::parse_crate_from_source_str(anon_src().to_strbuf(),
+ src.to_strbuf(),
cfg.clone(),
&sess.parse_sess)
}
// write Makefile-compatible dependency rules
let files: Vec<~str> = sess.codemap().files.borrow()
.iter().filter(|fmap| fmap.is_real_file())
- .map(|fmap| fmap.name.clone())
+ .map(|fmap| fmap.name.to_owned())
.collect();
let mut file = try!(io::File::create(&deps_filename));
for path in out_filenames.iter() {
match node {
pprust::NodeItem(item) => {
try!(pp::space(&mut s.s));
- s.synth_comment(item.id.to_str())
+ s.synth_comment(item.id.to_str().to_strbuf())
}
pprust::NodeBlock(blk) => {
try!(pp::space(&mut s.s));
- s.synth_comment("block ".to_owned() + blk.id.to_str())
+ s.synth_comment((format!("block {}", blk.id)).to_strbuf())
}
pprust::NodeExpr(expr) => {
try!(pp::space(&mut s.s));
- try!(s.synth_comment(expr.id.to_str()));
+ try!(s.synth_comment(expr.id.to_str().to_strbuf()));
s.pclose()
}
pprust::NodePat(pat) => {
try!(pp::space(&mut s.s));
- s.synth_comment("pat ".to_owned() + pat.id.to_str())
+ s.synth_comment((format!("pat {}", pat.id)).to_strbuf())
}
}
}
pprust::print_crate(sess.codemap(),
sess.diagnostic(),
&krate,
- src_name,
+ src_name.to_strbuf(),
&mut rdr,
out,
&IdentifiedAnnotation,
pprust::print_crate(annotation.analysis.ty_cx.sess.codemap(),
annotation.analysis.ty_cx.sess.diagnostic(),
&krate,
- src_name,
+ src_name.to_strbuf(),
&mut rdr,
out,
&annotation,
pprust::print_crate(sess.codemap(),
sess.diagnostic(),
&krate,
- src_name,
+ src_name.to_strbuf(),
&mut rdr,
out,
&pprust::NoAnn,
)
// Seems out of place, but it uses session, so I'm putting it here
-pub fn expect<T:Clone>(sess: &Session, opt: Option<T>, msg: || -> ~str) -> T {
+pub fn expect<T:Clone>(sess: &Session, opt: Option<T>, msg: || -> StrBuf)
+ -> T {
diagnostic::expect(sess.diagnostic(), opt, msg)
}
cx.ext_cx.bt_push(ExpnInfo {
call_site: DUMMY_SP,
callee: NameAndSpan {
- name: "test".to_owned(),
+ name: "test".to_strbuf(),
format: MacroAttribute,
span: None
}
#![!resolve_unexported]
extern crate test (name = "test", vers = "...");
fn main() {
- test::test_main_static(::os::args(), tests)
+ test::test_main_static(::os::args().as_slice(), tests)
}
static tests : &'static [test::TestDescAndFn] = &[
let mainfn = (quote_item!(&cx.ext_cx,
pub fn main() {
#![main]
- #![allow(deprecated_owned_vector)]
- test::test_main_static(::std::os::args(), TESTS);
+ use std::slice::Vector;
+ test::test_main_static(::std::os::args().as_slice(), TESTS);
}
)).unwrap();
fn is_test_crate(krate: &ast::Crate) -> bool {
match attr::find_crateid(krate.attrs.as_slice()) {
- Some(ref s) if "test" == s.name => true,
+ Some(ref s) if "test" == s.name.as_slice() => true,
_ => false
}
}
let name_lit: ast::Lit =
nospan(ast::LitStr(token::intern_and_get_ident(
- ast_util::path_name_i(path.as_slice())),
+ ast_util::path_name_i(path.as_slice()).as_slice()),
ast::CookedStr));
let name_expr = @ast::Expr {
&sess.parse_sess)
}
d::StrInput(ref src) => {
- parse::parse_crate_attrs_from_source_str(d::anon_src(),
- (*src).clone(),
- Vec::new(),
- &sess.parse_sess)
+ parse::parse_crate_attrs_from_source_str(
+ d::anon_src().to_strbuf(),
+ src.to_strbuf(),
+ Vec::new(),
+ &sess.parse_sess)
}
};
result.move_iter().collect()
}
pub fn main() {
- std::os::set_exit_status(main_args(std::os::args()));
+ std::os::set_exit_status(main_args(std::os::args().as_slice()));
}
pub fn main_args(args: &[~str]) -> int {
};
let macros = decoder::get_exported_macros(library.metadata.as_slice());
let registrar = decoder::get_macro_registrar_fn(library.metadata.as_slice()).map(|id| {
- decoder::get_symbol(library.metadata.as_slice(), id)
+ decoder::get_symbol(library.metadata.as_slice(), id).to_strbuf()
});
let mc = MacroCrate {
lib: library.dylib.clone(),
- macros: macros.move_iter().collect(),
+ macros: macros.move_iter().map(|x| x.to_strbuf()).collect(),
registrar_symbol: registrar,
};
if should_link {
let all_items = reader::get_doc(reader::Doc(cdata.data()), tag_items);
let class_doc = expect(tcx.sess.diagnostic(),
decoder::maybe_find_item(class_id.node, all_items),
- || format!("get_field_type: class ID {:?} not found",
- class_id) );
+ || {
+ (format!("get_field_type: class ID {:?} not found",
+ class_id)).to_strbuf()
+ });
let the_field = expect(tcx.sess.diagnostic(),
decoder::maybe_find_item(def.node, class_doc),
- || format!("get_field_type: in class {:?}, field ID {:?} not found",
- class_id, def) );
+ || {
+ (format!("get_field_type: in class {:?}, field ID {:?} not found",
+ class_id,
+ def)).to_strbuf()
+ });
let ty = decoder::item_type(def, the_field, tcx, &*cdata);
ty::ty_param_bounds_and_ty {
generics: ty::Generics {type_param_defs: Rc::new(Vec::new()),
let def = self.ecx.tcx.sess.codemap().span_to_snippet(item.span)
.expect("Unable to find source for macro");
self.ebml_w.start_tag(tag_macro_def);
- self.ebml_w.wr_str(def);
+ self.ebml_w.wr_str(def.as_slice());
self.ebml_w.end_tag();
}
_ => {}
"".to_owned()
};
- try!(ps.synth_comment(format!("id {}: {}{}{}", id, entry_str,
- gens_str, kills_str)));
+ try!(ps.synth_comment((format!("id {}: {}{}{}", id, entry_str,
+ gens_str, kills_str)).to_strbuf()));
try!(pp::space(&mut ps.s));
}
Ok(())
fn check_pat(cx: &mut Context, pat: &Pat) {
let var_name = match pat.node {
PatWild => Some("_".to_owned()),
- PatIdent(_, ref path, _) => Some(path_to_str(path)),
+ PatIdent(_, ref path, _) => Some(path_to_str(path).to_owned()),
_ => None
};
impl<'a> PrivacyVisitor<'a> {
// used when debugging
fn nodestr(&self, id: ast::NodeId) -> ~str {
- self.tcx.map.node_to_str(id)
+ self.tcx.map.node_to_str(id).to_owned()
}
// Determines whether the given definition is public from the point of view
match def {
DefMod(_) | DefForeignMod(_) => {}
- DefVariant(_, variant_id, is_struct) => {
+ DefVariant(enum_did, variant_id, is_struct) => {
debug!("(building reduced graph for external crate) building \
variant {}",
final_ident);
- // We assume the parent is visible, or else we wouldn't have seen
- // it. Also variants are public-by-default if the parent was also
- // public.
+ // If this variant is public, then it was publicly reexported,
+ // otherwise we need to inherit the visibility of the enum
+ // definition.
+ let is_exported = is_public ||
+ self.external_exports.contains(&enum_did);
if is_struct {
- child_name_bindings.define_type(def, DUMMY_SP, true);
+ child_name_bindings.define_type(def, DUMMY_SP, is_exported);
self.structs.insert(variant_id);
} else {
- child_name_bindings.define_value(def, DUMMY_SP, true);
+ child_name_bindings.define_value(def, DUMMY_SP, is_exported);
}
}
DefFn(..) | DefStaticMethod(..) | DefStatic(..) => {
.codemap()
.span_to_snippet(imports.get(index).span)
.unwrap();
- if sn.contains("::") {
+ if sn.as_slice().contains("::") {
self.resolve_error(imports.get(index).span,
"unresolved import");
} else {
let err = format!("unresolved import (maybe you meant `{}::*`?)",
- sn.slice(0, sn.len()));
+ sn.as_slice().slice(0, sn.len()));
self.resolve_error(imports.get(index).span, err);
}
}
let f = decl_rust_fn(ccx, false, inputs, output, name);
csearch::get_item_attrs(&ccx.sess().cstore, did, |meta_items| {
- set_llvm_fn_attrs(meta_items.iter().map(|&x| attr::mk_attr(x)).collect::<~[_]>(), f)
+ set_llvm_fn_attrs(meta_items.iter().map(|&x| attr::mk_attr(x))
+ .collect::<Vec<_>>().as_slice(), f)
});
ccx.externs.borrow_mut().insert(name.to_owned(), f);
for p in param_substs.iter() { p.validate(); }
debug!("new_fn_ctxt(path={}, id={}, param_substs={})",
- if id == -1 { "".to_owned() } else { ccx.tcx.map.path_to_str(id) },
+ if id == -1 {
+ "".to_owned()
+ } else {
+ ccx.tcx.map.path_to_str(id).to_owned()
+ },
id, param_substs.map(|s| s.repr(ccx.tcx())));
let substd_output_type = match param_substs {
param_substs: Option<¶m_substs>,
id: ast::NodeId,
attrs: &[ast::Attribute]) {
- let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id));
+ let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id).to_owned());
debug!("trans_fn(param_substs={})", param_substs.map(|s| s.repr(ccx.tcx())));
let _icx = push_ctxt("trans_fn");
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx(), id));
let encode_parms = crate_ctxt_to_encode_parms(cx, encode_inlined_item);
let metadata = encoder::encode_metadata(encode_parms, krate);
- let compressed = encoder::metadata_encoding_version +
- match flate::deflate_bytes(metadata.as_slice()) {
- Some(compressed) => compressed,
- None => cx.sess().fatal(format!("failed to compress metadata", ))
- }.as_slice();
- let llmeta = C_bytes(cx, compressed);
+ let compressed = Vec::from_slice(encoder::metadata_encoding_version)
+ .append(match flate::deflate_bytes(metadata.as_slice()) {
+ Some(compressed) => compressed,
+ None => cx.sess().fatal(format!("failed to compress metadata"))
+ }.as_slice());
+ let llmeta = C_bytes(cx, compressed.as_slice());
let llconst = C_struct(cx, [llmeta], false);
let name = format!("rust_metadata_{}_{}_{}", cx.link_meta.crateid.name,
cx.link_meta.crateid.version_or_default(), cx.link_meta.crate_hash);
// crashes if the module identifer is same as other symbols
// such as a function name in the module.
// 1. http://llvm.org/bugs/show_bug.cgi?id=11479
- let llmod_id = link_meta.crateid.name + ".rs";
+ let mut llmod_id = link_meta.crateid.name.clone();
+ llmod_id.push_str(".rs");
- let ccx = CrateContext::new(llmod_id, tcx, exp_map2,
+ let ccx = CrateContext::new(llmod_id.as_slice(), tcx, exp_map2,
Sha256::new(), link_meta, reachable);
{
let _icx = push_ctxt("text");
let map_node = session::expect(
ccx.sess(),
tcx.map.find(def_id.node),
- || format!("local item should be in ast map"));
+ || "local item should be in ast map".to_strbuf());
match map_node {
ast_map::NodeForeignItem(_) => {
}
pub fn node_id_to_str(&self, id: ast::NodeId) -> ~str {
- self.tcx().map.node_to_str(id)
+ self.tcx().map.node_to_str(id).to_owned()
}
pub fn expr_to_str(&self, e: &ast::Expr) -> ~str {
-> (ValueRef, ValueRef) {
let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
let filename_cstr = C_cstr(bcx.ccx(),
- token::intern_and_get_ident(loc.file.name), true);
+ token::intern_and_get_ident(loc.file
+ .name
+ .as_slice()),
+ true);
let filename = build::PointerCast(bcx, filename_cstr, Type::i8p(bcx.ccx()));
let line = C_int(bcx.ccx(), loc.line as int);
(filename, line)
use util::ppaux::{Repr, ty_to_str};
use std::c_str::ToCStr;
-use std::slice;
+use std::vec;
use std::vec::Vec;
use libc::c_uint;
use syntax::{ast, ast_util};
let vec_ty = ty::expr_ty(cx.tcx(), e);
let unit_ty = ty::sequence_element_type(cx.tcx(), vec_ty);
let llunitty = type_of::type_of(cx, unit_ty);
- let (vs, inlineable) = slice::unzip(es.iter().map(|e| const_expr(cx, *e, is_local)));
+ let (vs, inlineable) = vec::unzip(es.iter().map(|e| const_expr(cx, *e, is_local)));
// If the vector contains enums, an LLVM array won't work.
let v = if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
- C_struct(cx, vs, false)
+ C_struct(cx, vs.as_slice(), false)
} else {
- C_array(llunitty, vs)
+ C_array(llunitty, vs.as_slice())
};
(v, llunitty, inlineable.iter().fold(true, |a, &b| a && b))
}
};
expr::with_field_tys(tcx, ety, Some(e.id), |discr, field_tys| {
- let (cs, inlineable) = slice::unzip(field_tys.iter().enumerate()
+ let (cs, inlineable) = vec::unzip(field_tys.iter().enumerate()
.map(|(ix, &field_ty)| {
match fs.iter().find(|f| field_ty.ident.name == f.ident.node.name) {
Some(f) => const_expr(cx, (*f).expr, is_local),
}
}
}));
- (adt::trans_const(cx, &*repr, discr, cs),
+ (adt::trans_const(cx, &*repr, discr, cs.as_slice()),
inlineable.iter().fold(true, |a, &b| a && b))
})
}
let v_fail_str = C_cstr(ccx, fail_str, true);
let _icx = push_ctxt("trans_fail_value");
let loc = bcx.sess().codemap().lookup_char_pos(sp.lo);
- let v_filename = C_cstr(ccx, token::intern_and_get_ident(loc.file.name), true);
+ let v_filename = C_cstr(ccx,
+ token::intern_and_get_ident(loc.file
+ .name
+ .as_slice()),
+ true);
let v_line = loc.line as int;
let v_str = PointerCast(bcx, v_fail_str, Type::i8p(ccx));
let v_filename = PointerCast(bcx, v_filename, Type::i8p(ccx));
};
let filename = span_start(cx, span).file.name.clone();
- let file_metadata = file_metadata(cx, filename);
+ let file_metadata = file_metadata(cx, filename.as_slice());
let is_local_to_unit = is_node_local_to_unit(cx, node_id);
let loc = span_start(cx, span);
}
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.name.as_slice());
let function_type_metadata = unsafe {
let fn_signature = get_function_signature(cx, fn_ast_id, fn_decl, param_substs, span);
});
fn fallback_path(cx: &CrateContext) -> CString {
- cx.link_meta.crateid.name.to_c_str()
+ cx.link_meta.crateid.name.as_slice().to_c_str()
}
}
let cx: &CrateContext = bcx.ccx();
let filename = span_start(cx, span).file.name.clone();
- let file_metadata = file_metadata(cx, filename);
+ let file_metadata = file_metadata(cx, filename.as_slice());
let name = token::get_ident(variable_ident);
let loc = span_start(cx, span);
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
let file_name = span_start(cx, definition_span).file.name.clone();
- let file_metadata = file_metadata(cx, file_name);
+ let file_metadata = file_metadata(cx, file_name.as_slice());
let struct_metadata_stub = create_struct_stub(cx,
struct_llvm_type,
let tuple_llvm_type = type_of::type_of(cx, tuple_type);
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.name.as_slice());
UnfinishedMetadata {
cache_id: cache_id_for_type(tuple_type),
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id);
let loc = span_start(cx, definition_span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.name.as_slice());
// For empty enums there is an early exit. Just describe it as an empty struct with the
// appropriate type name
];
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.name.as_slice());
return composite_type_metadata(
cx,
assert!(member_descriptions.len() == member_llvm_types.len());
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.name.as_slice());
composite_type_metadata(
cx,
assert!(member_descriptions.len() == member_llvm_types.len());
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.name.as_slice());
return composite_type_metadata(
cx,
span: Span)
-> DICompositeType {
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.name.as_slice());
let mut signature_metadata: Vec<DIType> =
Vec::with_capacity(signature.inputs.len() + 1);
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
let file_name = span_start(cx, definition_span).file.name.clone();
- let file_metadata = file_metadata(cx, file_name);
+ let file_metadata = file_metadata(cx, file_name.as_slice());
let trait_llvm_type = type_of::type_of(cx, trait_type);
&mut HashMap<ast::NodeId, DIScope>|) {
// Create a new lexical scope and push it onto the stack
let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.name.as_slice());
let parent_scope = scope_stack.last().unwrap().scope_metadata;
let scope_metadata = unsafe {
if need_new_scope {
// Create a new lexical scope and push it onto the stack
let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx,
+ loc.file
+ .name
+ .as_slice());
let parent_scope = scope_stack.last().unwrap().scope_metadata;
let scope_metadata = unsafe {
ty::with_path(cx.tcx(), def_id, |path| {
// prepend crate name if not already present
let krate = if def_id.krate == ast::LOCAL_CRATE {
- let crate_namespace_ident = token::str_to_ident(cx.link_meta.crateid.name);
+ let crate_namespace_ident = token::str_to_ident(cx.link_meta
+ .crateid
+ .name
+ .as_slice());
Some(ast_map::PathMod(crate_namespace_ident.name))
} else {
None
let map_node = session::expect(
ccx.sess(),
ccx.tcx.map.find(fn_id.node),
- || format!("while monomorphizing {:?}, couldn't find it in the \
- item map (may have attempted to monomorphize an item \
- defined in a different crate?)", fn_id));
+ || {
+ (format!("while monomorphizing {:?}, couldn't find it in the \
+ item map (may have attempted to monomorphize an item \
+ defined in a different crate?)", fn_id)).to_strbuf()
+ });
match map_node {
ast_map::NodeForeignItem(_) => {
// This is a bit unfortunate.
let idx = real_substs.tps.len() - num_method_ty_params;
- let substs = real_substs.tps.slice(0, idx) +
- &[real_substs.self_ty.unwrap()] + real_substs.tps.tailn(idx);
+ let substs = Vec::from_slice(real_substs.tps.slice(0, idx))
+ .append([real_substs.self_ty.unwrap()])
+ .append(real_substs.tps.tailn(idx));
debug!("static default: changed substitution to {}",
substs.repr(ccx.tcx()));
- ty::subst_tps(ccx.tcx(), substs, None, llitem_ty)
+ ty::subst_tps(ccx.tcx(), substs.as_slice(), None, llitem_ty)
}
};
}
pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> ~str {
- with_path(cx, id, |path| ast_map::path_to_str(path))
+ with_path(cx, id, |path| ast_map::path_to_str(path)).to_owned()
}
pub enum DtorKind {
fn fold_trait_store(&mut self, s: ty::TraitStore) -> ty::TraitStore {
super_fold_trait_store(self, s)
}
+
+    // Fold an `AutoRef` adjustment. The default implementation delegates to
+    // `super_fold_autoref`, which rebuilds the variant with folded regions.
+    fn fold_autoref(&mut self, ar: &ty::AutoRef) -> ty::AutoRef {
+        super_fold_autoref(self, ar)
+    }
}
pub fn fold_opt_ty<T:TypeFolder>(this: &mut T,
}
}
+/// Default traversal for `ty::AutoRef`: reconstructs the autoref with every
+/// embedded region passed through `this.fold_region`. Variants that carry no
+/// region (`AutoUnsafe`) are returned unchanged.
+pub fn super_fold_autoref<T:TypeFolder>(this: &mut T,
+                                        autoref: &ty::AutoRef)
+                                        -> ty::AutoRef
+{
+    match *autoref {
+        ty::AutoPtr(r, m) => ty::AutoPtr(this.fold_region(r), m),
+        ty::AutoBorrowVec(r, m) => ty::AutoBorrowVec(this.fold_region(r), m),
+        ty::AutoBorrowVecRef(r, m) => ty::AutoBorrowVecRef(this.fold_region(r), m),
+        ty::AutoUnsafe(m) => ty::AutoUnsafe(m),
+        ty::AutoBorrowObj(r, m) => ty::AutoBorrowObj(this.fold_region(r), m),
+    }
+}
+
///////////////////////////////////////////////////////////////////////////
// Some sample folders
#[deriving(Clone)]
pub struct FnCtxt<'a> {
+ // This flag is set to true if, during the writeback phase, we encounter
+ // a type error in this function.
+ writeback_errors: Cell<bool>,
+
// Number of errors that had been reported when we started
// checking this function. On exit, if we find that *more* errors
// have been reported, we will skip regionck and other work that
region_bnd: ast::NodeId)
-> FnCtxt<'a> {
FnCtxt {
+ writeback_errors: Cell::new(false),
err_count_on_creation: ccx.tcx.sess.err_count(),
ret_ty: rty,
ps: RefCell::new(FnStyleState::function(ast::NormalFn, 0)),
// Create the function context. This is either derived from scratch or,
// in the case of function expressions, based on the outer context.
let fcx = FnCtxt {
+ writeback_errors: Cell::new(false),
err_count_on_creation: err_count_on_creation,
ret_ty: ret_ty,
ps: RefCell::new(FnStyleState::function(fn_style, id)),
pub fn opt_node_ty_substs(&self,
id: ast::NodeId,
- f: |&ty::substs| -> bool)
- -> bool {
+ f: |&ty::substs|) {
match self.inh.node_type_substs.borrow().find(&id) {
- Some(s) => f(s),
- None => true
+ Some(s) => { f(s) }
+ None => { }
}
}
insert_vtables(fcx, MethodCall::expr(ex.id), vtbls);
}
}
- true
});
}
use middle::pat_util;
use middle::ty;
+use middle::ty_fold::TypeFolder;
use middle::typeck::astconv::AstConv;
use middle::typeck::check::FnCtxt;
use middle::typeck::infer::{force_all, resolve_all, resolve_region};
use middle::typeck::infer::resolve_type;
use middle::typeck::infer;
use middle::typeck::{MethodCall, MethodCallee};
-use middle::typeck::{vtable_res, vtable_static, vtable_param};
+use middle::typeck::{vtable_origin, vtable_static, vtable_param};
use middle::typeck::write_substs_to_tcx;
use middle::typeck::write_ty_to_tcx;
-use util::ppaux;
use util::ppaux::Repr;
use syntax::ast;
use syntax::visit;
use syntax::visit::Visitor;
-fn resolve_type_vars_in_type(fcx: &FnCtxt, sp: Span, typ: ty::t)
- -> Option<ty::t> {
- if !ty::type_needs_infer(typ) { return Some(typ); }
- match resolve_type(fcx.infcx(), typ, resolve_all | force_all) {
- Ok(new_type) => return Some(new_type),
- Err(e) => {
- if !fcx.ccx.tcx.sess.has_errors() {
- fcx.ccx.tcx.sess.span_err(
- sp,
- format!("cannot determine a type \
- for this expression: {}",
- infer::fixup_err_to_str(e)))
- }
- return None;
+///////////////////////////////////////////////////////////////////////////
+// Entry point functions
+
+pub fn resolve_type_vars_in_expr(fcx: &FnCtxt, e: &ast::Expr) {
+ assert_eq!(fcx.writeback_errors.get(), false);
+ let mut wbcx = WritebackCx::new(fcx);
+ wbcx.visit_expr(e, ());
+ wbcx.visit_upvar_borrow_map();
+}
+
+pub fn resolve_type_vars_in_fn(fcx: &FnCtxt,
+ decl: &ast::FnDecl,
+ blk: &ast::Block) {
+ assert_eq!(fcx.writeback_errors.get(), false);
+ let mut wbcx = WritebackCx::new(fcx);
+ wbcx.visit_block(blk, ());
+ for arg in decl.inputs.iter() {
+ wbcx.visit_pat(arg.pat, ());
+
+ // Privacy needs the type for the whole pattern, not just each binding
+ if !pat_util::pat_is_binding(&fcx.tcx().def_map, arg.pat) {
+ wbcx.visit_node_id(ResolvingPattern(arg.pat.span),
+ arg.pat.id);
}
}
+ wbcx.visit_upvar_borrow_map();
}
-fn resolve_method_map_entry(wbcx: &mut WbCtxt, sp: Span, method_call: MethodCall) {
- let fcx = wbcx.fcx;
- let tcx = fcx.ccx.tcx;
-
- // Resolve any method map entry
- match fcx.inh.method_map.borrow_mut().pop(&method_call) {
- Some(method) => {
- debug!("writeback::resolve_method_map_entry(call={:?}, entry={:?})",
- method_call, method.repr(tcx));
- let new_method = MethodCallee {
- origin: method.origin,
- ty: match resolve_type_vars_in_type(fcx, sp, method.ty) {
- Some(t) => t,
- None => {
- wbcx.success = false;
- return;
- }
- },
- substs: ty::substs {
- tps: method.substs.tps.move_iter().map(|subst| {
- match resolve_type_vars_in_type(fcx, sp, subst) {
- Some(t) => t,
- None => { wbcx.success = false; ty::mk_err() }
- }
- }).collect(),
- regions: ty::ErasedRegions,
- self_ty: None
- }
- };
- tcx.method_map.borrow_mut().insert(method_call, new_method);
- }
- None => {}
+///////////////////////////////////////////////////////////////////////////
+// The Writeback context. This visitor walks the AST, checking the
+// fn-specific tables to find references to types or regions. It
+// resolves those regions to remove inference variables and writes the
+// final result back into the master tables in the tcx. Here and
+// there, it applies a few ad-hoc checks that were not convenient to
+// do elsewhere.
+
+struct WritebackCx<'cx> {
+ fcx: &'cx FnCtxt<'cx>,
+}
+
+impl<'cx> WritebackCx<'cx> {
+ fn new(fcx: &'cx FnCtxt) -> WritebackCx<'cx> {
+ WritebackCx { fcx: fcx }
+ }
+
+ fn tcx(&self) -> &'cx ty::ctxt {
+ self.fcx.tcx()
}
}
-fn resolve_vtable_map_entry(fcx: &FnCtxt, sp: Span, vtable_key: MethodCall) {
- // Resolve any vtable map entry
- match fcx.inh.vtable_map.borrow_mut().pop(&vtable_key) {
- Some(origins) => {
- let r_origins = resolve_origins(fcx, sp, origins);
- debug!("writeback::resolve_vtable_map_entry(vtable_key={}, vtables={:?})",
- vtable_key, r_origins.repr(fcx.tcx()));
- fcx.tcx().vtable_map.borrow_mut().insert(vtable_key, r_origins);
+///////////////////////////////////////////////////////////////////////////
+// Impl of Visitor for Resolver
+//
+// This is the master code which walks the AST. It delegates most of
+// the heavy lifting to the generic visit and resolve functions
+// below. In general, a function is made into a `visitor` if it must
+// traffic in node-ids or update tables in the type context etc.
+
+impl<'cx> Visitor<()> for WritebackCx<'cx> {
+ fn visit_item(&mut self, _: &ast::Item, _: ()) {
+ // Ignore items
+ }
+
+ fn visit_stmt(&mut self, s: &ast::Stmt, _: ()) {
+ if self.fcx.writeback_errors.get() {
+ return;
}
- None => {}
+
+ self.visit_node_id(ResolvingExpr(s.span), ty::stmt_node_id(s));
+ visit::walk_stmt(self, s, ());
}
- fn resolve_origins(fcx: &FnCtxt, sp: Span,
- vtbls: vtable_res) -> vtable_res {
- vtbls.move_iter().map(|os| os.move_iter().map(|origin| {
- match origin {
- vtable_static(def_id, tys, origins) => {
- let r_tys = tys.move_iter().map(|t| {
- match resolve_type_vars_in_type(fcx, sp, t) {
- Some(t1) => t1,
- None => ty::mk_err()
- }
- }).collect();
- let r_origins = resolve_origins(fcx, sp, origins);
- vtable_static(def_id, r_tys, r_origins)
+ fn visit_expr(&mut self, e:&ast::Expr, _: ()) {
+ if self.fcx.writeback_errors.get() {
+ return;
+ }
+
+ self.visit_node_id(ResolvingExpr(e.span), e.id);
+ self.visit_method_map_entry(ResolvingExpr(e.span),
+ MethodCall::expr(e.id));
+ self.visit_vtable_map_entry(ResolvingExpr(e.span),
+ MethodCall::expr(e.id));
+
+ match e.node {
+ ast::ExprFnBlock(ref decl, _) | ast::ExprProc(ref decl, _) => {
+ for input in decl.inputs.iter() {
+ let _ = self.visit_node_id(ResolvingExpr(e.span),
+ input.id);
}
- vtable_param(n, b) => vtable_param(n, b)
}
- }).collect()).collect()
+ _ => {}
+ }
+
+ visit::walk_expr(self, e, ());
}
-}
-fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId) {
- let fcx = wbcx.fcx;
- let tcx = fcx.ccx.tcx;
-
- // Resolve any borrowings for the node with id `id`
- let resolved_adj = match fcx.inh.adjustments.borrow_mut().pop(&id) {
- None => None,
-
- Some(adjustment) => {
- Some(match adjustment {
- ty::AutoAddEnv(store) => {
- let r = match store {
- ty::RegionTraitStore(r, _) => r,
- ty::UniqTraitStore => ty::ReStatic
- };
- match resolve_region(fcx.infcx(),
- r,
- resolve_all | force_all) {
- Err(e) => {
- // This should not, I think, happen:
- tcx.sess.span_err(
- sp,
- format!("cannot resolve bound for closure: \
- {}",
- infer::fixup_err_to_str(e)));
- wbcx.success = false;
- return;
- }
- Ok(r1) => {
- // FIXME(eddyb) #2190 Allow only statically resolved
- // bare functions to coerce to a closure to avoid
- // constructing (slower) indirect call wrappers.
- match tcx.def_map.borrow().find(&id) {
- Some(&ast::DefFn(..)) |
- Some(&ast::DefStaticMethod(..)) |
- Some(&ast::DefVariant(..)) |
- Some(&ast::DefStruct(_)) => {}
- _ => tcx.sess.span_err(sp,
- "cannot coerce non-statically resolved bare fn")
- }
+ fn visit_block(&mut self, b: &ast::Block, _: ()) {
+ if self.fcx.writeback_errors.get() {
+ return;
+ }
- ty::AutoAddEnv(match store {
- ty::RegionTraitStore(..) => {
- ty::RegionTraitStore(r1, ast::MutMutable)
- }
- ty::UniqTraitStore => ty::UniqTraitStore
- })
- }
- }
- }
+ self.visit_node_id(ResolvingExpr(b.span), b.id);
+ visit::walk_block(self, b, ());
+ }
- ty::AutoDerefRef(adj) => {
- for autoderef in range(0, adj.autoderefs) {
- let method_call = MethodCall::autoderef(id, autoderef as u32);
- resolve_method_map_entry(wbcx, sp, method_call);
- resolve_vtable_map_entry(wbcx.fcx, sp, method_call);
- }
+ fn visit_pat(&mut self, p: &ast::Pat, _: ()) {
+ if self.fcx.writeback_errors.get() {
+ return;
+ }
- ty::AutoDerefRef(ty::AutoDerefRef {
- autoderefs: adj.autoderefs,
- autoref: adj.autoref.map(|r| r.map_region(|r| {
- match resolve_region(fcx.infcx(), r,
- resolve_all | force_all) {
- Ok(r1) => r1,
- Err(e) => {
- // This should not, I think, happen.
- tcx.sess.span_err(
- sp,
- format!("cannot resolve scope of borrow: \
- {}",
- infer::fixup_err_to_str(e)));
- r
- }
- }
- })),
- })
- }
+ self.visit_node_id(ResolvingPattern(p.span), p.id);
+
+ debug!("Type for pattern binding {} (id {}) resolved to {}",
+ pat_to_str(p),
+ p.id,
+ ty::node_id_to_type(self.tcx(), p.id).repr(self.tcx()));
+
+ visit::walk_pat(self, p, ());
+ }
- adjustment => adjustment
- })
+ fn visit_local(&mut self, l: &ast::Local, _: ()) {
+ if self.fcx.writeback_errors.get() {
+ return;
}
- };
- debug!("Adjustments for node {}: {:?}",
- id, resolved_adj);
- match resolved_adj {
- Some(adj) => {
- tcx.adjustments.borrow_mut().insert(id, adj);
+ let var_ty = self.fcx.local_ty(l.span, l.id);
+ let var_ty = var_ty.resolve(self.fcx, ResolvingLocal(l.span));
+ write_ty_to_tcx(self.tcx(), l.id, var_ty);
+ visit::walk_local(self, l, ());
+ }
+
+ fn visit_ty(&mut self, _t: &ast::Ty, _: ()) {
+ // ignore
+ }
+}
+
+impl<'cx> WritebackCx<'cx> {
+ fn visit_upvar_borrow_map(&self) {
+ if self.fcx.writeback_errors.get() {
+ return;
+ }
+
+ for (upvar_id, upvar_borrow) in self.fcx.inh.upvar_borrow_map.borrow().iter() {
+ let r = upvar_borrow.region;
+ let r = r.resolve(self.fcx, ResolvingUpvar(*upvar_id));
+ let new_upvar_borrow = ty::UpvarBorrow { kind: upvar_borrow.kind,
+ region: r };
+ debug!("Upvar borrow for {} resolved to {}",
+ upvar_id.repr(self.tcx()),
+ new_upvar_borrow.repr(self.tcx()));
+ self.fcx.tcx().upvar_borrow_map.borrow_mut().insert(
+ *upvar_id, new_upvar_borrow);
}
- None => {}
}
- // Resolve the type of the node with id `id`
- let n_ty = fcx.node_ty(id);
- match resolve_type_vars_in_type(fcx, sp, n_ty) {
- None => {
- wbcx.success = false;
- }
-
- Some(t) => {
- debug!("resolve_type_vars_for_node(id={}, n_ty={}, t={})",
- id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t));
- write_ty_to_tcx(tcx, id, t);
- fcx.opt_node_ty_substs(id, |substs| {
- let mut new_tps = Vec::new();
- for subst in substs.tps.iter() {
- match resolve_type_vars_in_type(fcx, sp, *subst) {
- Some(t) => new_tps.push(t),
- None => { wbcx.success = false; break }
- }
- }
- write_substs_to_tcx(tcx, id, new_tps);
- wbcx.success
+ fn visit_node_id(&self, reason: ResolveReason, id: ast::NodeId) {
+ // Resolve any borrowings for the node with id `id`
+ self.visit_adjustments(reason, id);
+
+ // Resolve the type of the node with id `id`
+ let n_ty = self.fcx.node_ty(id);
+ let n_ty = n_ty.resolve(self.fcx, reason);
+ write_ty_to_tcx(self.tcx(), id, n_ty);
+ debug!("Node {} has type {}", id, n_ty.repr(self.tcx()));
+
+ // Resolve any substitutions
+ self.fcx.opt_node_ty_substs(id, |node_substs| {
+ let mut new_tps = Vec::new();
+ for subst in node_substs.tps.iter() {
+ new_tps.push(subst.resolve(self.fcx, reason));
+ }
+ write_substs_to_tcx(self.tcx(), id, new_tps);
});
- }
}
-}
-struct WbCtxt<'a> {
- fcx: &'a FnCtxt<'a>,
+ fn visit_adjustments(&self, reason: ResolveReason, id: ast::NodeId) {
+ match self.fcx.inh.adjustments.borrow_mut().pop(&id) {
+ None => {
+ debug!("No adjustments for node {}", id);
+ }
- // As soon as we hit an error we have to stop resolving
- // the entire function.
- success: bool,
-}
+ Some(adjustment) => {
+ let resolved_adjustment = match adjustment {
+ ty::AutoAddEnv(store) => {
+ // FIXME(eddyb) #2190 Allow only statically resolved
+ // bare functions to coerce to a closure to avoid
+ // constructing (slower) indirect call wrappers.
+ match self.tcx().def_map.borrow().find(&id) {
+ Some(&ast::DefFn(..)) |
+ Some(&ast::DefStaticMethod(..)) |
+ Some(&ast::DefVariant(..)) |
+ Some(&ast::DefStruct(_)) => {
+ }
+ _ => {
+ self.tcx().sess.span_err(
+ reason.span(self.fcx),
+ "cannot coerce non-statically resolved bare fn")
+ }
+ }
-fn visit_stmt(s: &ast::Stmt, wbcx: &mut WbCtxt) {
- if !wbcx.success { return; }
- resolve_type_vars_for_node(wbcx, s.span, ty::stmt_node_id(s));
- visit::walk_stmt(wbcx, s, ());
-}
+ ty::AutoAddEnv(store.resolve(self.fcx, reason))
+ }
+
+ ty::AutoDerefRef(adj) => {
+ for autoderef in range(0, adj.autoderefs) {
+ let method_call = MethodCall::autoderef(id, autoderef as u32);
+ self.visit_method_map_entry(reason, method_call);
+ self.visit_vtable_map_entry(reason, method_call);
+ }
-fn visit_expr(e: &ast::Expr, wbcx: &mut WbCtxt) {
- if !wbcx.success {
- return;
+ ty::AutoDerefRef(ty::AutoDerefRef {
+ autoderefs: adj.autoderefs,
+ autoref: adj.autoref.resolve(self.fcx, reason),
+ })
+ }
+
+ adjustment => adjustment
+ };
+ debug!("Adjustments for node {}: {:?}", id, resolved_adjustment);
+ self.tcx().adjustments.borrow_mut().insert(
+ id, resolved_adjustment);
+ }
+ }
}
- resolve_type_vars_for_node(wbcx, e.span, e.id);
- resolve_method_map_entry(wbcx, e.span, MethodCall::expr(e.id));
- resolve_vtable_map_entry(wbcx.fcx, e.span, MethodCall::expr(e.id));
+ fn visit_method_map_entry(&self,
+ reason: ResolveReason,
+ method_call: MethodCall) {
+ // Resolve any method map entry
+ match self.fcx.inh.method_map.borrow_mut().pop(&method_call) {
+ Some(method) => {
+ debug!("writeback::resolve_method_map_entry(call={:?}, entry={})",
+ method_call,
+ method.repr(self.tcx()));
+ let mut new_method = MethodCallee {
+ origin: method.origin,
+ ty: method.ty.resolve(self.fcx, reason),
+ substs: method.substs.resolve(self.fcx, reason),
+ };
+
+ // Wack. For some reason I don't quite know, we always
+ // hard-code the self-ty and regions to these
+ // values. Changing this causes downstream errors I
+ // don't feel like investigating right now (in
+ // particular, self_ty is set to mk_err in some cases,
+ // probably for invocations on objects, and this
+ // causes encoding failures). -nmatsakis
+ new_method.substs.self_ty = None;
+ new_method.substs.regions = ty::ErasedRegions;
+
+ self.tcx().method_map.borrow_mut().insert(
+ method_call,
+ new_method);
+ }
+ None => {}
+ }
+ }
- match e.node {
- ast::ExprFnBlock(ref decl, _) | ast::ExprProc(ref decl, _) => {
- for input in decl.inputs.iter() {
- let _ = resolve_type_vars_for_node(wbcx, e.span, input.id);
+ fn visit_vtable_map_entry(&self,
+ reason: ResolveReason,
+ vtable_key: MethodCall) {
+ // Resolve any vtable map entry
+ match self.fcx.inh.vtable_map.borrow_mut().pop(&vtable_key) {
+ Some(origins) => {
+ let r_origins = origins.resolve(self.fcx, reason);
+ debug!("writeback::resolve_vtable_map_entry(\
+ vtable_key={}, vtables={:?})",
+ vtable_key, r_origins.repr(self.tcx()));
+ self.tcx().vtable_map.borrow_mut().insert(vtable_key, r_origins);
}
+ None => {}
}
- _ => {}
}
+}
+
+///////////////////////////////////////////////////////////////////////////
+// Resolution reason.
- visit::walk_expr(wbcx, e, ());
+// Why a writeback resolution was requested. Each variant carries enough
+// information to recover a span for error reporting (see the `span` method
+// on `ResolveReason` below).
+enum ResolveReason {
+    ResolvingExpr(Span),        // resolving the type of an expression
+    ResolvingLocal(Span),       // resolving the type of a `let` binding
+    ResolvingPattern(Span),     // resolving the type of a pattern
+    ResolvingUpvar(ty::UpvarId) // resolving the borrow region of an upvar
}
-fn visit_block(b: &ast::Block, wbcx: &mut WbCtxt) {
- if !wbcx.success {
- return;
+impl ResolveReason {
+ fn span(&self, fcx: &FnCtxt) -> Span {
+ match *self {
+ ResolvingExpr(s) => s,
+ ResolvingLocal(s) => s,
+ ResolvingPattern(s) => s,
+ ResolvingUpvar(upvar_id) => {
+ ty::expr_span(fcx.tcx(), upvar_id.closure_expr_id)
+ }
+ }
}
+}
+
+///////////////////////////////////////////////////////////////////////////
+// Convenience methods for resolving different kinds of things.
- resolve_type_vars_for_node(wbcx, b.span, b.id);
- visit::walk_block(wbcx, b, ());
+trait Resolve {
+ fn resolve(&self, fcx: &FnCtxt, reason: ResolveReason) -> Self;
}
-fn visit_pat(p: &ast::Pat, wbcx: &mut WbCtxt) {
- if !wbcx.success {
- return;
+impl<T:Resolve> Resolve for Option<T> {
+ fn resolve(&self, fcx: &FnCtxt, reason: ResolveReason) -> Option<T> {
+ self.as_ref().map(|t| t.resolve(fcx, reason))
}
+}
- resolve_type_vars_for_node(wbcx, p.span, p.id);
- debug!("Type for pattern binding {} (id {}) resolved to {}",
- pat_to_str(p), p.id,
- wbcx.fcx.infcx().ty_to_str(
- ty::node_id_to_type(wbcx.fcx.ccx.tcx,
- p.id)));
- visit::walk_pat(wbcx, p, ());
+impl<T:Resolve> Resolve for Vec<T> {
+ fn resolve(&self, fcx: &FnCtxt, reason: ResolveReason) -> Vec<T> {
+ self.iter().map(|t| t.resolve(fcx, reason)).collect()
+ }
}
-fn visit_local(l: &ast::Local, wbcx: &mut WbCtxt) {
- if !wbcx.success { return; }
- let var_ty = wbcx.fcx.local_ty(l.span, l.id);
- match resolve_type(wbcx.fcx.infcx(), var_ty, resolve_all | force_all) {
- Ok(lty) => {
- debug!("Type for local {} (id {}) resolved to {}",
- pat_to_str(l.pat),
- l.id,
- wbcx.fcx.infcx().ty_to_str(lty));
- write_ty_to_tcx(wbcx.fcx.ccx.tcx, l.id, lty);
- }
- Err(e) => {
- wbcx.fcx.ccx.tcx.sess.span_err(
- l.span,
- format!("cannot determine a type \
- for this local variable: {}",
- infer::fixup_err_to_str(e)));
- wbcx.success = false;
- }
+impl Resolve for ty::TraitStore {
+ fn resolve(&self, fcx: &FnCtxt, reason: ResolveReason) -> ty::TraitStore {
+ Resolver::new(fcx, reason).fold_trait_store(*self)
}
- visit::walk_local(wbcx, l, ());
}
-fn visit_item(_item: &ast::Item, _wbcx: &mut WbCtxt) {
- // Ignore items
+
+impl Resolve for ty::t {
+ fn resolve(&self, fcx: &FnCtxt, reason: ResolveReason) -> ty::t {
+ Resolver::new(fcx, reason).fold_ty(*self)
+ }
}
-impl<'a> Visitor<()> for WbCtxt<'a> {
- fn visit_item(&mut self, i: &ast::Item, _: ()) { visit_item(i, self); }
- fn visit_stmt(&mut self, s: &ast::Stmt, _: ()) { visit_stmt(s, self); }
- fn visit_expr(&mut self, ex:&ast::Expr, _: ()) { visit_expr(ex, self); }
- fn visit_block(&mut self, b: &ast::Block, _: ()) { visit_block(b, self); }
- fn visit_pat(&mut self, p: &ast::Pat, _: ()) { visit_pat(p, self); }
- fn visit_local(&mut self, l: &ast::Local, _: ()) { visit_local(l, self); }
- // FIXME(#10894) should continue recursing
- fn visit_ty(&mut self, _t: &ast::Ty, _: ()) {}
+impl Resolve for ty::Region {
+ fn resolve(&self, fcx: &FnCtxt, reason: ResolveReason) -> ty::Region {
+ Resolver::new(fcx, reason).fold_region(*self)
+ }
}
-fn resolve_upvar_borrow_map(wbcx: &mut WbCtxt) {
- if !wbcx.success {
- return;
+impl Resolve for ty::substs {
+ fn resolve(&self, fcx: &FnCtxt, reason: ResolveReason) -> ty::substs {
+ Resolver::new(fcx, reason).fold_substs(self)
}
+}
- let fcx = wbcx.fcx;
- let tcx = fcx.tcx();
- for (upvar_id, upvar_borrow) in fcx.inh.upvar_borrow_map.borrow().iter() {
- let r = upvar_borrow.region;
- match resolve_region(fcx.infcx(), r, resolve_all | force_all) {
- Ok(r) => {
- let new_upvar_borrow = ty::UpvarBorrow {
- kind: upvar_borrow.kind,
- region: r
- };
- debug!("Upvar borrow for {} resolved to {}",
- upvar_id.repr(tcx), new_upvar_borrow.repr(tcx));
- tcx.upvar_borrow_map.borrow_mut().insert(*upvar_id,
- new_upvar_borrow);
+impl Resolve for ty::AutoRef {
+ fn resolve(&self, fcx: &FnCtxt, reason: ResolveReason) -> ty::AutoRef {
+ Resolver::new(fcx, reason).fold_autoref(self)
+ }
+}
+
+impl Resolve for vtable_origin {
+ fn resolve(&self, fcx: &FnCtxt, reason: ResolveReason) -> vtable_origin {
+ match *self {
+ vtable_static(def_id, ref tys, ref origins) => {
+ let r_tys = tys.resolve(fcx, reason);
+ let r_origins = origins.resolve(fcx, reason);
+ vtable_static(def_id, r_tys, r_origins)
}
- Err(e) => {
- let span = ty::expr_span(tcx, upvar_id.closure_expr_id);
- fcx.ccx.tcx.sess.span_err(
- span, format!("cannot resolve lifetime for \
- captured variable `{}`: {}",
- ty::local_var_name_str(tcx, upvar_id.var_id).get().to_str(),
- infer::fixup_err_to_str(e)));
- wbcx.success = false;
+ vtable_param(n, b) => {
+ vtable_param(n, b)
}
- };
+ }
}
}
-pub fn resolve_type_vars_in_expr(fcx: &FnCtxt, e: &ast::Expr) -> bool {
- let mut wbcx = WbCtxt { fcx: fcx, success: true };
- let wbcx = &mut wbcx;
- wbcx.visit_expr(e, ());
- resolve_upvar_borrow_map(wbcx);
- return wbcx.success;
+///////////////////////////////////////////////////////////////////////////
+// The Resolver. This is the type folding engine that detects
+// unresolved types and so forth.
+
+struct Resolver<'cx> {
+ fcx: &'cx FnCtxt<'cx>,
+ reason: ResolveReason,
}
-pub fn resolve_type_vars_in_fn(fcx: &FnCtxt, decl: &ast::FnDecl,
- blk: &ast::Block) -> bool {
- let mut wbcx = WbCtxt { fcx: fcx, success: true };
- let wbcx = &mut wbcx;
- wbcx.visit_block(blk, ());
- for arg in decl.inputs.iter() {
- wbcx.visit_pat(arg.pat, ());
- // Privacy needs the type for the whole pattern, not just each binding
- if !pat_util::pat_is_binding(&fcx.tcx().def_map, arg.pat) {
- resolve_type_vars_for_node(wbcx, arg.pat.span, arg.pat.id);
+impl<'cx> Resolver<'cx> {
+ fn new(fcx: &'cx FnCtxt<'cx>,
+ reason: ResolveReason)
+ -> Resolver<'cx>
+ {
+ Resolver { fcx: fcx, reason: reason }
+ }
+
+ fn report_error(&self, e: infer::fixup_err) {
+ self.fcx.writeback_errors.set(true);
+ if !self.tcx().sess.has_errors() {
+ match self.reason {
+ ResolvingExpr(span) => {
+ self.tcx().sess.span_err(
+ span,
+ format!("cannot determine a type for \
+ this expression: {}",
+ infer::fixup_err_to_str(e)))
+ }
+
+ ResolvingLocal(span) => {
+ self.tcx().sess.span_err(
+ span,
+ format!("cannot determine a type for \
+ this local variable: {}",
+ infer::fixup_err_to_str(e)))
+ }
+
+ ResolvingPattern(span) => {
+ self.tcx().sess.span_err(
+ span,
+ format!("cannot determine a type for \
+ this pattern binding: {}",
+ infer::fixup_err_to_str(e)))
+ }
+
+ ResolvingUpvar(upvar_id) => {
+ let span = self.reason.span(self.fcx);
+ self.tcx().sess.span_err(
+ span,
+ format!("cannot resolve lifetime for \
+ captured variable `{}`: {}",
+ ty::local_var_name_str(
+ self.tcx(),
+ upvar_id.var_id).get().to_str(),
+ infer::fixup_err_to_str(e)));
+ }
+ }
+ }
+ }
+}
+
+impl<'cx> TypeFolder for Resolver<'cx> {
+ fn tcx<'a>(&'a self) -> &'a ty::ctxt {
+ self.fcx.tcx()
+ }
+
+ fn fold_ty(&mut self, t: ty::t) -> ty::t {
+ if !ty::type_needs_infer(t) {
+ return t;
+ }
+
+ match resolve_type(self.fcx.infcx(), t, resolve_all | force_all) {
+ Ok(t) => t,
+ Err(e) => {
+ self.report_error(e);
+ ty::mk_err()
+ }
+ }
+ }
+
+ fn fold_region(&mut self, r: ty::Region) -> ty::Region {
+ match resolve_region(self.fcx.infcx(), r, resolve_all | force_all) {
+ Ok(r) => r,
+ Err(e) => {
+ self.report_error(e);
+ ty::ReStatic
+ }
}
}
- resolve_upvar_borrow_map(wbcx);
- return wbcx.success;
}
ty_bot => "!".to_owned(),
ty_bool => "bool".to_owned(),
ty_char => "char".to_owned(),
- ty_int(t) => ast_util::int_ty_to_str(t, None),
- ty_uint(t) => ast_util::uint_ty_to_str(t, None),
- ty_float(t) => ast_util::float_ty_to_str(t),
+ ty_int(t) => ast_util::int_ty_to_str(t, None).to_owned(),
+ ty_uint(t) => ast_util::uint_ty_to_str(t, None).to_owned(),
+ ty_float(t) => ast_util::float_ty_to_str(t).to_owned(),
ty_box(typ) => "@".to_owned() + ty_to_str(cx, typ),
ty_uniq(typ) => "~".to_owned() + ty_to_str(cx, typ),
ty_ptr(ref tm) => "*".to_owned() + mt_to_str(cx, tm),
impl Repr for Span {
fn repr(&self, tcx: &ctxt) -> ~str {
- tcx.sess.codemap().span_to_str(*self)
+ tcx.sess.codemap().span_to_str(*self).to_owned()
}
}
let id = link::find_crate_id(self.attrs.as_slice(),
t_outputs.out_filestem);
Crate {
- name: id.name,
+ name: id.name.to_owned(),
module: Some(self.module.clean()),
externs: externs,
}
let ctxt = super::ctxtkey.get().unwrap();
let cm = ctxt.sess().codemap().clone();
let sn = match cm.span_to_snippet(*self) {
- Some(x) => x,
+ Some(x) => x.to_owned(),
None => "".to_owned()
};
debug!("got snippet {}", sn);
"".to_owned()
} else {
let mut m = decl.bounds.iter().map(|s| s.to_str());
- ": " + m.collect::<~[~str]>().connect(" + ")
+ ": " + m.collect::<Vec<~str>>().connect(" + ")
},
arrow = match decl.decl.output { clean::Unit => "no", _ => "yes" },
ret = decl.decl.output)
/// Highlights some source code, returning the HTML output.
pub fn highlight(src: &str, class: Option<&str>) -> ~str {
let sess = parse::new_parse_sess();
- let fm = parse::string_to_filemap(&sess, src.to_owned(), "<stdin>".to_owned());
+ let fm = parse::string_to_filemap(&sess,
+ src.to_strbuf(),
+ "<stdin>".to_strbuf());
let mut out = io::MemWriter::new();
doit(&sess,
hi: test,
expn_info: None,
}).unwrap();
- if snip.contains("/") {
+ if snip.as_slice().contains("/") {
try!(write!(out, "<span class='comment'>{}</span>",
- Escape(snip)));
+ Escape(snip.as_slice())));
} else {
- try!(write!(out, "{}", Escape(snip)));
+ try!(write!(out, "{}", Escape(snip.as_slice())));
}
}
last = next.sp.hi;
// stringifying this token
let snip = sess.span_diagnostic.cm.span_to_snippet(next.sp).unwrap();
if klass == "" {
- try!(write!(out, "{}", Escape(snip)));
+ try!(write!(out, "{}", Escape(snip.as_slice())));
} else {
try!(write!(out, "<span class='{}'>{}</span>", klass,
- Escape(snip)));
+ Escape(snip.as_slice())));
}
}
root_path.push_str("../");
});
- cur.push(p.filename().expect("source has no filename") + bytes!(".html"));
+ cur.push(Vec::from_slice(p.filename().expect("source has no filename"))
+ .append(bytes!(".html")));
let mut w = BufferedWriter::new(try!(File::create(&cur)));
let title = format!("{} -- source", cur.filename_display());
type Output = (clean::Crate, Vec<plugins::PluginJson> );
pub fn main() {
- std::os::set_exit_status(main_args(std::os::args()));
+ std::os::set_exit_status(main_args(std::os::args().as_slice()));
}
pub fn opts() -> Vec<getopts::OptGroup> {
impl GetAddrInfoRequest {
pub fn run(loop_: &Loop, node: Option<&str>, service: Option<&str>,
- hints: Option<ai::Hint>) -> Result<~[ai::Info], UvError> {
+ hints: Option<ai::Hint>) -> Result<Vec<ai::Info>, UvError> {
assert!(node.is_some() || service.is_some());
let (_c_node, c_node_ptr) = match node {
Some(n) => {
}
// Traverse the addrinfo linked list, producing a vector of Rust socket addresses
-pub fn accum_addrinfo(addr: &Addrinfo) -> ~[ai::Info] {
+pub fn accum_addrinfo(addr: &Addrinfo) -> Vec<ai::Info> {
unsafe {
let mut addr = addr.handle;
}
}
- return addrs.move_iter().collect();
+ addrs
}
}
/// Returns either the corresponding process object or an error which
/// occurred.
pub fn spawn(io_loop: &mut UvIoFactory, config: process::ProcessConfig)
- -> Result<(Box<Process>, ~[Option<PipeWatcher>]), UvError> {
+ -> Result<(Box<Process>, Vec<Option<PipeWatcher>>), UvError>
+ {
let cwd = config.cwd.map(|s| s.to_c_str());
let mut io = vec![config.stdin, config.stdout, config.stderr];
for slot in config.extra_io.iter() {
});
match ret {
- Ok(p) => Ok((p, ret_io.move_iter().collect())),
+ Ok(p) => Ok((p, ret_io)),
Err(e) => Err(e),
}
}
}
fn get_host_addresses(&mut self, host: Option<&str>, servname: Option<&str>,
- hint: Option<ai::Hint>) -> Result<~[ai::Info], IoError> {
+ hint: Option<ai::Hint>) -> Result<Vec<ai::Info>, IoError> {
let r = GetAddrInfoRequest::run(&self.loop_, host, servname, hint);
r.map_err(uv_error_to_io_error)
}
fn spawn(&mut self, config: ProcessConfig)
-> Result<(Box<rtio::RtioProcess:Send>,
- ~[Option<Box<rtio::RtioPipe:Send>>]),
+ Vec<Option<Box<rtio::RtioPipe:Send>>>),
IoError>
{
match Process::spawn(self, config) {
}
unsafe {
- str::raw::from_utf8_owned(v.move_iter().collect())
+ str::raw::from_utf8(v.as_slice()).to_owned()
}
}
}
pub trait FromBase64 {
/// Converts the value of `self`, interpreted as base64 encoded data, into
/// an owned vector of bytes, returning the vector.
- fn from_base64(&self) -> Result<~[u8], FromBase64Error>;
+ fn from_base64(&self) -> Result<Vec<u8>, FromBase64Error>;
}
/// Errors that can occur when decoding a base64 encoded string
* ```rust
* extern crate serialize;
* use serialize::base64::{ToBase64, FromBase64, STANDARD};
- * use std::str;
*
* fn main () {
* let hello_str = bytes!("Hello, World").to_base64(STANDARD);
* println!("base64 output: {}", hello_str);
* let res = hello_str.from_base64();
* if res.is_ok() {
- * let opt_bytes = str::from_utf8_owned(res.unwrap());
+ * let opt_bytes = StrBuf::from_utf8(res.unwrap());
* if opt_bytes.is_some() {
* println!("decoded from base64: {}", opt_bytes.unwrap());
* }
* }
* ```
*/
- fn from_base64(&self) -> Result<~[u8], FromBase64Error> {
+ fn from_base64(&self) -> Result<Vec<u8>, FromBase64Error> {
let mut r = Vec::new();
let mut buf: u32 = 0;
let mut modulus = 0;
_ => return Err(InvalidBase64Length),
}
- Ok(r.move_iter().collect())
+ Ok(r)
}
}
#[test]
fn test_from_base64_basic() {
- assert_eq!("".from_base64().unwrap(), "".as_bytes().to_owned());
- assert_eq!("Zg==".from_base64().unwrap(), "f".as_bytes().to_owned());
- assert_eq!("Zm8=".from_base64().unwrap(), "fo".as_bytes().to_owned());
- assert_eq!("Zm9v".from_base64().unwrap(), "foo".as_bytes().to_owned());
- assert_eq!("Zm9vYg==".from_base64().unwrap(), "foob".as_bytes().to_owned());
- assert_eq!("Zm9vYmE=".from_base64().unwrap(), "fooba".as_bytes().to_owned());
- assert_eq!("Zm9vYmFy".from_base64().unwrap(), "foobar".as_bytes().to_owned());
+ assert_eq!("".from_base64().unwrap().as_slice(), "".as_bytes());
+ assert_eq!("Zg==".from_base64().unwrap().as_slice(), "f".as_bytes());
+ assert_eq!("Zm8=".from_base64().unwrap().as_slice(), "fo".as_bytes());
+ assert_eq!("Zm9v".from_base64().unwrap().as_slice(), "foo".as_bytes());
+ assert_eq!("Zm9vYg==".from_base64().unwrap().as_slice(), "foob".as_bytes());
+ assert_eq!("Zm9vYmE=".from_base64().unwrap().as_slice(), "fooba".as_bytes());
+ assert_eq!("Zm9vYmFy".from_base64().unwrap().as_slice(), "foobar".as_bytes());
}
#[test]
fn test_from_base64_newlines() {
- assert_eq!("Zm9v\r\nYmFy".from_base64().unwrap(),
- "foobar".as_bytes().to_owned());
- assert_eq!("Zm9vYg==\r\n".from_base64().unwrap(),
- "foob".as_bytes().to_owned());
+ assert_eq!("Zm9v\r\nYmFy".from_base64().unwrap().as_slice(),
+ "foobar".as_bytes());
+ assert_eq!("Zm9vYg==\r\n".from_base64().unwrap().as_slice(),
+ "foob".as_bytes());
}
#[test]
for _ in range(0, 1000) {
let times = task_rng().gen_range(1u, 100);
let v = Vec::from_fn(times, |_| random::<u8>());
- assert_eq!(v.as_slice().to_base64(STANDARD).from_base64().unwrap(),
- v.as_slice().to_owned());
+ assert_eq!(v.as_slice().to_base64(STANDARD).from_base64().unwrap().as_slice(),
+ v.as_slice());
}
}
}
unsafe {
- str::raw::from_utf8_owned(v.move_iter().collect())
+ str::raw::from_utf8(v.as_slice()).to_owned()
}
}
}
pub trait FromHex {
/// Converts the value of `self`, interpreted as hexadecimal encoded data,
/// into an owned vector of bytes, returning the vector.
- fn from_hex(&self) -> Result<~[u8], FromHexError>;
+ fn from_hex(&self) -> Result<Vec<u8>, FromHexError>;
}
/// Errors that can occur when decoding a hex encoded string
* ```rust
* extern crate serialize;
* use serialize::hex::{FromHex, ToHex};
- * use std::str;
*
* fn main () {
* let hello_str = "Hello, World".as_bytes().to_hex();
* println!("{}", hello_str);
* let bytes = hello_str.from_hex().unwrap();
* println!("{:?}", bytes);
- * let result_str = str::from_utf8_owned(bytes).unwrap();
+ * let result_str = StrBuf::from_utf8(bytes).unwrap();
* println!("{}", result_str);
* }
* ```
*/
- fn from_hex(&self) -> Result<~[u8], FromHexError> {
+ fn from_hex(&self) -> Result<Vec<u8>, FromHexError> {
// This may be an overestimate if there is any whitespace
let mut b = Vec::with_capacity(self.len() / 2);
let mut modulus = 0;
#[test]
pub fn test_from_hex_okay() {
- assert_eq!("666f6f626172".from_hex().unwrap(),
- "foobar".as_bytes().to_owned());
- assert_eq!("666F6F626172".from_hex().unwrap(),
- "foobar".as_bytes().to_owned());
+ assert_eq!("666f6f626172".from_hex().unwrap().as_slice(),
+ "foobar".as_bytes());
+ assert_eq!("666F6F626172".from_hex().unwrap().as_slice(),
+ "foobar".as_bytes());
}
#[test]
#[test]
pub fn test_from_hex_ignores_whitespace() {
- assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(),
- "foobar".as_bytes().to_owned());
+ assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap().as_slice(),
+ "foobar".as_bytes());
}
#[test]
#[test]
pub fn test_from_hex_all_bytes() {
for i in range(0, 256) {
- assert_eq!(format!("{:02x}", i as uint).from_hex().unwrap(), ~[i as u8]);
- assert_eq!(format!("{:02X}", i as uint).from_hex().unwrap(), ~[i as u8]);
+ assert_eq!(format!("{:02x}", i as uint).from_hex().unwrap().as_slice(), &[i as u8]);
+ assert_eq!(format!("{:02X}", i as uint).from_hex().unwrap().as_slice(), &[i as u8]);
}
}
pub struct TestStruct1 {
data_int: u8,
data_str: ~str,
- data_vector: ~[u8],
+ data_vector: Vec<u8>,
}
// To serialize use the `json::str_encode` to encode an object in a string.
// It calls the generated `Encodable` impl.
fn main() {
let to_encode_object = TestStruct1
- {data_int: 1, data_str:"toto".to_owned(), data_vector:~[2,3,4,5]};
+ {data_int: 1, data_str:"toto".to_owned(), data_vector:vec![2,3,4,5]};
let encoded_str: ~str = json::Encoder::str_encode(&to_encode_object);
// To deserialize use the `json::from_str` and `json::Decoder`
pub struct TestStruct1 {
data_int: u8,
data_str: ~str,
- data_vector: ~[u8],
+ data_vector: Vec<u8>,
}
impl ToJson for TestStruct1 {
// Serialization using our impl of to_json
let test2: TestStruct1 = TestStruct1 {data_int: 1, data_str:"toto".to_owned(),
- data_vector:~[2,3,4,5]};
+ data_vector:vec![2,3,4,5]};
let tjson: json::Json = test2.to_json();
let json_str: ~str = tjson.to_str();
Null,
}
-pub type List = ~[Json];
+pub type List = Vec<Json>;
pub type Object = TreeMap<~str, Json>;
/// The errors that can arise while parsing a JSON stream.
fn to_json(&self) -> Json {
match *self {
(ref a, ref b) => {
- List(box [a.to_json(), b.to_json()])
+ List(vec![a.to_json(), b.to_json()])
}
}
}
fn to_json(&self) -> Json {
match *self {
(ref a, ref b, ref c) => {
- List(box [a.to_json(), b.to_json(), c.to_json()])
+ List(vec![a.to_json(), b.to_json(), c.to_json()])
}
}
}
struct Inner {
a: (),
b: uint,
- c: ~[~str],
+ c: Vec<~str>,
}
#[deriving(Eq, Encodable, Decodable, Show)]
struct Outer {
- inner: ~[Inner],
+ inner: Vec<Inner>,
}
fn mk_object(items: &[(~str, Json)]) -> Json {
#[test]
fn test_write_list() {
- assert_eq!(List(~[]).to_str(), "[]".to_owned());
- assert_eq!(List(~[]).to_pretty_str(), "[]".to_owned());
+ assert_eq!(List(vec![]).to_str(), "[]".to_owned());
+ assert_eq!(List(vec![]).to_pretty_str(), "[]".to_owned());
- assert_eq!(List(~[Boolean(true)]).to_str(), "[true]".to_owned());
+ assert_eq!(List(vec![Boolean(true)]).to_str(), "[true]".to_owned());
assert_eq!(
- List(~[Boolean(true)]).to_pretty_str(),
+ List(vec![Boolean(true)]).to_pretty_str(),
"\
[\n \
true\n\
]".to_owned()
);
- let long_test_list = List(box [
+ let long_test_list = List(vec![
Boolean(false),
Null,
- List(box [String("foo\nbar".to_owned()), Number(3.5)])]);
+ List(vec![String("foo\nbar".to_owned()), Number(3.5)])]);
assert_eq!(long_test_list.to_str(),
"[false,null,[\"foo\\nbar\",3.5]]".to_owned());
);
let complex_obj = mk_object([
- ("b".to_owned(), List(box [
+ ("b".to_owned(), List(vec![
mk_object([("c".to_owned(), String("\x0c\r".to_owned()))]),
mk_object([("d".to_owned(), String("".to_owned()))])
]))
let a = mk_object([
("a".to_owned(), Boolean(true)),
- ("b".to_owned(), List(box [
+ ("b".to_owned(), List(vec![
mk_object([("c".to_owned(), String("\x0c\r".to_owned()))]),
mk_object([("d".to_owned(), String("".to_owned()))])
]))
assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(from_str("[]"), Ok(List(~[])));
- assert_eq!(from_str("[ ]"), Ok(List(~[])));
- assert_eq!(from_str("[true]"), Ok(List(~[Boolean(true)])));
- assert_eq!(from_str("[ false ]"), Ok(List(~[Boolean(false)])));
- assert_eq!(from_str("[null]"), Ok(List(~[Null])));
+ assert_eq!(from_str("[]"), Ok(List(vec![])));
+ assert_eq!(from_str("[ ]"), Ok(List(vec![])));
+ assert_eq!(from_str("[true]"), Ok(List(vec![Boolean(true)])));
+ assert_eq!(from_str("[ false ]"), Ok(List(vec![Boolean(false)])));
+ assert_eq!(from_str("[null]"), Ok(List(vec![Null])));
assert_eq!(from_str("[3, 1]"),
- Ok(List(~[Number(3.0), Number(1.0)])));
+ Ok(List(vec![Number(3.0), Number(1.0)])));
assert_eq!(from_str("\n[3, 2]\n"),
- Ok(List(~[Number(3.0), Number(2.0)])));
+ Ok(List(vec![Number(3.0), Number(2.0)])));
assert_eq!(from_str("[2, [4, 1]]"),
- Ok(List(~[Number(2.0), List(~[Number(4.0), Number(1.0)])])));
+ Ok(List(vec![Number(2.0), List(vec![Number(4.0), Number(1.0)])])));
}
#[test]
fn test_decode_list() {
let mut decoder = Decoder::new(from_str("[]").unwrap());
- let v: ~[()] = Decodable::decode(&mut decoder).unwrap();
- assert_eq!(v, ~[]);
+ let v: Vec<()> = Decodable::decode(&mut decoder).unwrap();
+ assert_eq!(v, vec![]);
let mut decoder = Decoder::new(from_str("[null]").unwrap());
- let v: ~[()] = Decodable::decode(&mut decoder).unwrap();
- assert_eq!(v, ~[()]);
+ let v: Vec<()> = Decodable::decode(&mut decoder).unwrap();
+ assert_eq!(v, vec![()]);
let mut decoder = Decoder::new(from_str("[true]").unwrap());
- let v: ~[bool] = Decodable::decode(&mut decoder).unwrap();
- assert_eq!(v, ~[true]);
+ let v: Vec<bool> = Decodable::decode(&mut decoder).unwrap();
+ assert_eq!(v, vec![true]);
let mut decoder = Decoder::new(from_str("[true]").unwrap());
- let v: ~[bool] = Decodable::decode(&mut decoder).unwrap();
- assert_eq!(v, ~[true]);
+ let v: Vec<bool> = Decodable::decode(&mut decoder).unwrap();
+ assert_eq!(v, vec![true]);
let mut decoder = Decoder::new(from_str("[3, 1]").unwrap());
- let v: ~[int] = Decodable::decode(&mut decoder).unwrap();
- assert_eq!(v, ~[3, 1]);
+ let v: Vec<int> = Decodable::decode(&mut decoder).unwrap();
+ assert_eq!(v, vec![3, 1]);
let mut decoder = Decoder::new(from_str("[[3], [1, 2]]").unwrap());
- let v: ~[~[uint]] = Decodable::decode(&mut decoder).unwrap();
- assert_eq!(v, ~[~[3], ~[1, 2]]);
+ let v: Vec<Vec<uint>> = Decodable::decode(&mut decoder).unwrap();
+ assert_eq!(v, vec![vec![3], vec![1, 2]]);
}
#[test]
"{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
mk_object([
("a".to_owned(), Number(1.0)),
- ("b".to_owned(), List(~[Boolean(true)]))
+ ("b".to_owned(), List(vec![Boolean(true)]))
]));
assert_eq!(from_str(
"{".to_owned() +
"}").unwrap(),
mk_object([
("a".to_owned(), Number(1.0)),
- ("b".to_owned(), List(~[
+ ("b".to_owned(), List(vec![
Boolean(true),
String("foo\nbar".to_owned()),
mk_object([
assert_eq!(
v,
Outer {
- inner: ~[
- Inner { a: (), b: 2, c: ~["abc".to_owned(), "xyz".to_owned()] }
+ inner: vec![
+ Inner { a: (), b: 2, c: vec!["abc".to_owned(), "xyz".to_owned()] }
]
}
);
x: f64,
y: bool,
z: ~str,
- w: ~[DecodeStruct]
+ w: Vec<DecodeStruct>
}
#[deriving(Decodable)]
enum DecodeEnum {
impl<E, D:Decoder<E>,T:Decodable<D, E>> Decodable<D, E> for ~[T] {
fn decode(d: &mut D) -> Result<~[T], E> {
+ use std::vec::FromVec;
+
d.read_seq(|d, len| {
let mut v: Vec<T> = Vec::with_capacity(len);
for i in range(0, len) {
v.push(try!(d.read_seq_elt(i, |d| Decodable::decode(d))));
}
- let k = v.move_iter().collect::<~[T]>();
+ let k: ~[T] = FromVec::from_vec(v);
Ok(k)
})
}
impl<E, D: Decoder<E>> Decodable<D, E> for path::posix::Path {
fn decode(d: &mut D) -> Result<path::posix::Path, E> {
- let bytes: ~[u8] = try!(Decodable::decode(d));
+ let bytes: Vec<u8> = try!(Decodable::decode(d));
Ok(path::posix::Path::new(bytes))
}
}
impl<E, D: Decoder<E>> Decodable<D, E> for path::windows::Path {
fn decode(d: &mut D) -> Result<path::windows::Path, E> {
- let bytes: ~[u8] = try!(Decodable::decode(d));
+ let bytes: Vec<u8> = try!(Decodable::decode(d));
Ok(path::windows::Path::new(bytes))
}
}
}
pub trait DecoderHelpers<E> {
- fn read_to_vec<T>(&mut self, f: |&mut Self| -> Result<T, E>) -> Result<~[T], E>;
+ fn read_to_vec<T>(&mut self, f: |&mut Self| -> Result<T, E>) -> Result<Vec<T>, E>;
}
impl<E, D:Decoder<E>> DecoderHelpers<E> for D {
- fn read_to_vec<T>(&mut self, f: |&mut D| -> Result<T, E>) -> Result<~[T], E> {
+ fn read_to_vec<T>(&mut self, f: |&mut D| -> Result<T, E>) -> Result<Vec<T>, E> {
self.read_seq(|this, len| {
let mut v = Vec::with_capacity(len);
for i in range(0, len) {
v.push(try!(this.read_seq_elt(i, |this| f(this))));
}
- Ok(v.move_iter().collect())
+ Ok(v)
})
}
}
use to_str::{IntoStr};
use str;
use str::Str;
-use str::StrSlice;
+use str::{StrAllocating, StrSlice};
use str::OwnedStr;
use container::Container;
use cast;
/// Take ownership and cast to an ascii vector. Fail on non-ASCII input.
#[inline]
- fn into_ascii(self) -> ~[Ascii] {
+ fn into_ascii(self) -> Vec<Ascii> {
assert!(self.is_ascii());
unsafe {self.into_ascii_nocheck()}
}
/// Take ownership and cast to an ascii vector. Return None on non-ASCII input.
#[inline]
- fn into_ascii_opt(self) -> Option<~[Ascii]> {
+ fn into_ascii_opt(self) -> Option<Vec<Ascii>> {
if self.is_ascii() {
Some(unsafe { self.into_ascii_nocheck() })
} else {
/// Take ownership and cast to an ascii vector.
/// Does not perform validation checks.
- unsafe fn into_ascii_nocheck(self) -> ~[Ascii];
+ unsafe fn into_ascii_nocheck(self) -> Vec<Ascii>;
}
impl OwnedAsciiCast for ~[u8] {
}
#[inline]
- unsafe fn into_ascii_nocheck(self) -> ~[Ascii] {
- cast::transmute(self)
+ unsafe fn into_ascii_nocheck(self) -> Vec<Ascii> {
+ cast::transmute(Vec::from_slice(self.as_slice()))
}
}
}
#[inline]
- unsafe fn into_ascii_nocheck(self) -> ~[Ascii] {
+ unsafe fn into_ascii_nocheck(self) -> Vec<Ascii> {
+ let v: ~[u8] = cast::transmute(self);
+ v.into_ascii_nocheck()
+ }
+}
+
+impl OwnedAsciiCast for Vec<u8> {
+ #[inline]
+ fn is_ascii(&self) -> bool {
+ self.as_slice().is_ascii()
+ }
+
+ #[inline]
+ unsafe fn into_ascii_nocheck(self) -> Vec<Ascii> {
cast::transmute(self)
}
}
fn as_str_ascii<'a>(&'a self) -> &'a str;
/// Convert to vector representing a lower cased ascii string.
- fn to_lower(&self) -> ~[Ascii];
+ fn to_lower(&self) -> Vec<Ascii>;
/// Convert to vector representing a upper cased ascii string.
- fn to_upper(&self) -> ~[Ascii];
+ fn to_upper(&self) -> Vec<Ascii>;
/// Compares two Ascii strings ignoring case.
fn eq_ignore_case(self, other: &[Ascii]) -> bool;
}
#[inline]
- fn to_lower(&self) -> ~[Ascii] {
+ fn to_lower(&self) -> Vec<Ascii> {
self.iter().map(|a| a.to_lower()).collect()
}
#[inline]
- fn to_upper(&self) -> ~[Ascii] {
+ fn to_upper(&self) -> Vec<Ascii> {
self.iter().map(|a| a.to_upper()).collect()
}
impl IntoStr for Vec<Ascii> {
#[inline]
fn into_str(self) -> ~str {
- let v: ~[Ascii] = self.move_iter().collect();
- unsafe { cast::transmute(v) }
+ unsafe {
+ let s: &str = cast::transmute(self.as_slice());
+ s.to_owned()
+ }
}
}
-/// Trait to convert to an owned byte array by consuming self
+/// Trait to convert to an owned byte vector by consuming self
pub trait IntoBytes {
- /// Converts to an owned byte array by consuming self
- fn into_bytes(self) -> ~[u8];
+ /// Converts to an owned byte vector by consuming self
+ fn into_bytes(self) -> Vec<u8>;
}
-impl IntoBytes for ~[Ascii] {
- fn into_bytes(self) -> ~[u8] {
+impl IntoBytes for Vec<Ascii> {
+ fn into_bytes(self) -> Vec<u8> {
unsafe { cast::transmute(self) }
}
}
#[inline]
unsafe fn str_copy_map_bytes(string: &str, map: &'static [u8]) -> ~str {
- let bytes = string.bytes().map(|b| map[b as uint]).collect::<~[_]>();
-
- str::raw::from_utf8_owned(bytes)
+ let mut s = string.to_owned();
+ for b in str::raw::as_owned_vec(&mut s).mut_iter() {
+ *b = map[*b as uint];
+ }
+ s
}
static ASCII_LOWER_MAP: &'static [u8] = &[
macro_rules! v2ascii (
( [$($e:expr),*]) => (&[$(Ascii{chr:$e}),*]);
(&[$($e:expr),*]) => (&[$(Ascii{chr:$e}),*]);
- (~[$($e:expr),*]) => (box [$(Ascii{chr:$e}),*]);
)
macro_rules! vec2ascii (
#[test]
fn test_ascii_vec_ng() {
- assert_eq!(Vec::from_slice("abCDef&?#".to_ascii().to_lower()).into_str(),
- "abcdef&?#".to_owned());
- assert_eq!(Vec::from_slice("abCDef&?#".to_ascii().to_upper()).into_str(),
- "ABCDEF&?#".to_owned());
- assert_eq!(Vec::from_slice("".to_ascii().to_lower()).into_str(), "".to_owned());
- assert_eq!(Vec::from_slice("YMCA".to_ascii().to_lower()).into_str(), "ymca".to_owned());
- assert_eq!(Vec::from_slice("abcDEFxyz:.;".to_ascii().to_upper()).into_str(),
- "ABCDEFXYZ:.;".to_owned());
+ assert_eq!("abCDef&?#".to_ascii().to_lower().into_str(), "abcdef&?#".to_owned());
+ assert_eq!("abCDef&?#".to_ascii().to_upper().into_str(), "ABCDEF&?#".to_owned());
+ assert_eq!("".to_ascii().to_lower().into_str(), "".to_owned());
+ assert_eq!("YMCA".to_ascii().to_lower().into_str(), "ymca".to_owned());
+ assert_eq!("abcDEFxyz:.;".to_ascii().to_upper().into_str(), "ABCDEFXYZ:.;".to_owned());
}
#[test]
fn test_owned_ascii_vec() {
- assert_eq!(("( ;".to_owned()).into_ascii(), v2ascii!(~[40, 32, 59]));
- assert_eq!((box [40u8, 32u8, 59u8]).into_ascii(), v2ascii!(~[40, 32, 59]));
+ assert_eq!(("( ;".to_owned()).into_ascii(), vec2ascii![40, 32, 59]);
+ assert_eq!((box [40u8, 32u8, 59u8]).into_ascii(), vec2ascii![40, 32, 59]);
}
#[test]
#[test]
fn test_ascii_into_str() {
- assert_eq!(v2ascii!(~[40, 32, 59]).into_str(), "( ;".to_owned());
+ assert_eq!(vec2ascii![40, 32, 59].into_str(), "( ;".to_owned());
assert_eq!(vec2ascii!(40, 32, 59).into_str(), "( ;".to_owned());
}
#[test]
fn test_ascii_to_bytes() {
- assert_eq!(v2ascii!(~[40, 32, 59]).into_bytes(), box [40u8, 32u8, 59u8]);
+ assert_eq!(vec2ascii![40, 32, 59].into_bytes(), vec![40u8, 32u8, 59u8]);
}
#[test] #[should_fail]
assert_eq!(v.to_ascii_opt(), Some(v2));
assert_eq!("zoä华".to_ascii_opt(), None);
- assert_eq!((box [40u8, 32u8, 59u8]).into_ascii_opt(), Some(v2ascii!(~[40, 32, 59])));
- assert_eq!((box [127u8, 128u8, 255u8]).into_ascii_opt(), None);
+ assert_eq!((vec![40u8, 32u8, 59u8]).into_ascii_opt(), Some(vec2ascii![40, 32, 59]));
+ assert_eq!((vec![127u8, 128u8, 255u8]).into_ascii_opt(), None);
- assert_eq!(("( ;".to_owned()).into_ascii_opt(), Some(v2ascii!(~[40, 32, 59])));
+ assert_eq!(("( ;".to_owned()).into_ascii_opt(), Some(vec2ascii![40, 32, 59]));
assert_eq!(("zoä华".to_owned()).into_ascii_opt(), None);
}
pub use self::num::RadixFmt;
mod num;
-pub mod parse;
pub mod rt;
+#[cfg(stage0)]
+#[allow(missing_doc)]
+pub mod parse {
+ #[deriving(Eq)]
+ pub enum Alignment {
+ AlignLeft,
+ AlignRight,
+ AlignUnknown,
+ }
+
+ pub enum PluralKeyword {
+ Zero,
+ One,
+ Two,
+ Few,
+ Many,
+ }
+
+ pub enum Flag {
+ FlagSignPlus,
+ FlagSignMinus,
+ FlagAlternate,
+ FlagSignAwareZeroPad,
+ }
+}
+
pub type Result = io::IoResult<()>;
/// A struct to represent both where to emit formatting strings to and how they
/// Character used as 'fill' whenever there is alignment
pub fill: char,
/// Boolean indication of whether the output should be left-aligned
- pub align: parse::Alignment,
+ pub align: rt::Alignment,
/// Optionally specified integer width that the output should be
pub width: Option<uint>,
/// Optionally specified precision for numeric types
width: None,
precision: None,
buf: output,
- align: parse::AlignUnknown,
+ align: rt::AlignUnknown,
fill: ' ',
args: args,
curarg: args.iter(),
let value = value - match offset { Some(i) => i, None => 0 };
for s in selectors.iter() {
let run = match s.selector {
- rt::Keyword(parse::Zero) => value == 0,
- rt::Keyword(parse::One) => value == 1,
- rt::Keyword(parse::Two) => value == 2,
+ rt::Keyword(rt::Zero) => value == 0,
+ rt::Keyword(rt::One) => value == 1,
+ rt::Keyword(rt::Two) => value == 2,
// FIXME: Few/Many should have a user-specified boundary
// One possible option would be in the function
// pointer of the 'arg: Argument' struct.
- rt::Keyword(parse::Few) => value < 8,
- rt::Keyword(parse::Many) => value >= 8,
+ rt::Keyword(rt::Few) => value < 8,
+ rt::Keyword(rt::Many) => value >= 8,
rt::Literal(..) => false
};
/// This function will correctly account for the flags provided as well as
/// the minimum width. It will not take precision into account.
pub fn pad_integral(&mut self, is_positive: bool, prefix: &str, buf: &[u8]) -> Result {
- use fmt::parse::{FlagAlternate, FlagSignPlus, FlagSignAwareZeroPad};
+ use fmt::rt::{FlagAlternate, FlagSignPlus, FlagSignAwareZeroPad};
let mut width = buf.len();
Some(min) if self.flags & (1 << (FlagSignAwareZeroPad as uint)) != 0 => {
self.fill = '0';
try!(write_prefix(self));
- self.with_padding(min - width, parse::AlignRight, |f| f.buf.write(buf))
+ self.with_padding(min - width, rt::AlignRight, |f| f.buf.write(buf))
}
// Otherwise, the sign and prefix goes after the padding
Some(min) => {
- self.with_padding(min - width, parse::AlignRight, |f| {
+ self.with_padding(min - width, rt::AlignRight, |f| {
try!(write_prefix(f)); f.buf.write(buf)
})
}
// If we're under both the maximum and the minimum width, then fill
// up the minimum width with the specified string + some alignment.
Some(width) => {
- self.with_padding(width - s.len(), parse::AlignLeft, |me| {
+ self.with_padding(width - s.len(), rt::AlignLeft, |me| {
me.buf.write(s.as_bytes())
})
}
/// afterwards depending on whether right or left alingment is requested.
fn with_padding(&mut self,
padding: uint,
- default: parse::Alignment,
+ default: rt::Alignment,
f: |&mut Formatter| -> Result) -> Result {
let align = match self.align {
- parse::AlignUnknown => default,
- parse::AlignLeft | parse::AlignRight => self.align
+ rt::AlignUnknown => default,
+ rt::AlignLeft | rt::AlignRight => self.align
};
- if align == parse::AlignLeft {
+ if align == rt::AlignLeft {
try!(f(self));
}
let mut fill = [0u8, ..4];
for _ in range(0, padding) {
try!(self.buf.write(fill.slice_to(len)));
}
- if align == parse::AlignRight {
+ if align == rt::AlignRight {
try!(f(self));
}
Ok(())
impl<T> Pointer for *T {
fn fmt(&self, f: &mut Formatter) -> Result {
- f.flags |= 1 << (parse::FlagAlternate as uint);
+ f.flags |= 1 << (rt::FlagAlternate as uint);
secret_lower_hex::<uint>(&(*self as uint), f)
}
}
impl<'a, T: Show> Show for &'a [T] {
fn fmt(&self, f: &mut Formatter) -> Result {
- if f.flags & (1 << (parse::FlagAlternate as uint)) == 0 {
+ if f.flags & (1 << (rt::FlagAlternate as uint)) == 0 {
try!(write!(f.buf, "["));
}
let mut is_first = true;
}
try!(write!(f.buf, "{}", *x))
}
- if f.flags & (1 << (parse::FlagAlternate as uint)) == 0 {
+ if f.flags & (1 << (rt::FlagAlternate as uint)) == 0 {
try!(write!(f.buf, "]"));
}
Ok(())
+++ /dev/null
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Parsing of format strings
-//!
-//! These structures are used when parsing format strings for the compiler.
-//! Parsing does not happen at runtime: structures of `std::fmt::rt` are
-//! generated instead.
-
-use prelude::*;
-
-use char;
-use owned::Box;
-use str;
-
-/// A piece is a portion of the format string which represents the next part
-/// to emit. These are emitted as a stream by the `Parser` class.
-#[deriving(Eq)]
-pub enum Piece<'a> {
- /// A literal string which should directly be emitted
- String(&'a str),
- /// A back-reference to whatever the current argument is. This is used
- /// inside of a method call to refer back to the original argument.
- CurrentArgument,
- /// This describes that formatting should process the next argument (as
- /// specified inside) for emission.
- Argument(Argument<'a>),
-}
-
-/// Representation of an argument specification.
-#[deriving(Eq)]
-pub struct Argument<'a> {
- /// Where to find this argument
- pub position: Position<'a>,
- /// How to format the argument
- pub format: FormatSpec<'a>,
- /// If not `None`, what method to invoke on the argument
- pub method: Option<Box<Method<'a>>>
-}
-
-/// Specification for the formatting of an argument in the format string.
-#[deriving(Eq)]
-pub struct FormatSpec<'a> {
- /// Optionally specified character to fill alignment with
- pub fill: Option<char>,
- /// Optionally specified alignment
- pub align: Alignment,
- /// Packed version of various flags provided
- pub flags: uint,
- /// The integer precision to use
- pub precision: Count<'a>,
- /// The string width requested for the resulting format
- pub width: Count<'a>,
- /// The descriptor string representing the name of the format desired for
- /// this argument, this can be empty or any number of characters, although
- /// it is required to be one word.
- pub ty: &'a str
-}
-
-/// Enum describing where an argument for a format can be located.
-#[deriving(Eq)]
-pub enum Position<'a> {
- /// The argument will be in the next position. This is the default.
- ArgumentNext,
- /// The argument is located at a specific index.
- ArgumentIs(uint),
- /// The argument has a name.
- ArgumentNamed(&'a str),
-}
-
-/// Enum of alignments which are supported.
-#[deriving(Eq)]
-pub enum Alignment {
- /// The value will be aligned to the left.
- AlignLeft,
- /// The value will be aligned to the right.
- AlignRight,
- /// The value will take on a default alignment.
- AlignUnknown,
-}
-
-/// Various flags which can be applied to format strings. The meaning of these
-/// flags is defined by the formatters themselves.
-#[deriving(Eq)]
-pub enum Flag {
- /// A `+` will be used to denote positive numbers.
- FlagSignPlus,
- /// A `-` will be used to denote negative numbers. This is the default.
- FlagSignMinus,
- /// An alternate form will be used for the value. In the case of numbers,
- /// this means that the number will be prefixed with the supplied string.
- FlagAlternate,
- /// For numbers, this means that the number will be padded with zeroes,
- /// and the sign (`+` or `-`) will precede them.
- FlagSignAwareZeroPad,
-}
-
-/// A count is used for the precision and width parameters of an integer, and
-/// can reference either an argument or a literal integer.
-#[deriving(Eq)]
-pub enum Count<'a> {
- /// The count is specified explicitly.
- CountIs(uint),
- /// The count is specified by the argument with the given name.
- CountIsName(&'a str),
- /// The count is specified by the argument at the given index.
- CountIsParam(uint),
- /// The count is specified by the next parameter.
- CountIsNextParam,
- /// The count is implied and cannot be explicitly specified.
- CountImplied,
-}
-
-/// Enum describing all of the possible methods which the formatting language
-/// currently supports.
-#[deriving(Eq)]
-pub enum Method<'a> {
- /// A plural method selects on an integer over a list of either integer or
- /// keyword-defined clauses. The meaning of the keywords is defined by the
- /// current locale.
- ///
- /// An offset is optionally present at the beginning which is used to
- /// match against keywords, but it is not matched against the literal
- /// integers.
- ///
- /// The final element of this enum is the default "other" case which is
- /// always required to be specified.
- Plural(Option<uint>, Vec<PluralArm<'a>>, Vec<Piece<'a>>),
-
- /// A select method selects over a string. Each arm is a different string
- /// which can be selected for.
- ///
- /// As with `Plural`, a default "other" case is required as well.
- Select(Vec<SelectArm<'a>>, Vec<Piece<'a>>),
-}
-
-/// A selector for what pluralization a plural method should take
-#[deriving(Eq, TotalEq, Hash)]
-pub enum PluralSelector {
- /// One of the plural keywords should be used
- Keyword(PluralKeyword),
- /// A literal pluralization should be used
- Literal(uint),
-}
-
-/// Structure representing one "arm" of the `plural` function.
-#[deriving(Eq)]
-pub struct PluralArm<'a> {
- /// A selector can either be specified by a keyword or with an integer
- /// literal.
- pub selector: PluralSelector,
- /// Array of pieces which are the format of this arm
- pub result: Vec<Piece<'a>>,
-}
-
-/// Enum of the 5 CLDR plural keywords. There is one more, "other", but that
-/// is specially placed in the `Plural` variant of `Method`.
-///
-/// http://www.icu-project.org/apiref/icu4c/classicu_1_1PluralRules.html
-#[deriving(Eq, TotalEq, Hash)]
-#[allow(missing_doc)]
-pub enum PluralKeyword {
- /// The plural form for zero objects.
- Zero,
- /// The plural form for one object.
- One,
- /// The plural form for two objects.
- Two,
- /// The plural form for few objects.
- Few,
- /// The plural form for many objects.
- Many,
-}
-
-/// Structure representing one "arm" of the `select` function.
-#[deriving(Eq)]
-pub struct SelectArm<'a> {
- /// String selector which guards this arm
- pub selector: &'a str,
- /// Array of pieces which are the format of this arm
- pub result: Vec<Piece<'a>>,
-}
-
-/// The parser structure for interpreting the input format string. This is
-/// modelled as an iterator over `Piece` structures to form a stream of tokens
-/// being output.
-///
-/// This is a recursive-descent parser for the sake of simplicity, and if
-/// necessary there's probably lots of room for improvement performance-wise.
-pub struct Parser<'a> {
- input: &'a str,
- cur: str::CharOffsets<'a>,
- depth: uint,
- /// Error messages accumulated during parsing
- pub errors: Vec<~str>,
-}
-
-impl<'a> Iterator<Piece<'a>> for Parser<'a> {
- fn next(&mut self) -> Option<Piece<'a>> {
- match self.cur.clone().next() {
- Some((_, '#')) => { self.cur.next(); Some(CurrentArgument) }
- Some((_, '{')) => {
- self.cur.next();
- let ret = Some(Argument(self.argument()));
- self.must_consume('}');
- ret
- }
- Some((pos, '\\')) => {
- self.cur.next();
- self.escape(); // ensure it's a valid escape sequence
- Some(String(self.string(pos + 1))) // skip the '\' character
- }
- Some((_, '}')) if self.depth == 0 => {
- self.cur.next();
- self.err("unmatched `}` found");
- None
- }
- Some((_, '}')) | None => { None }
- Some((pos, _)) => {
- Some(String(self.string(pos)))
- }
- }
- }
-}
-
-impl<'a> Parser<'a> {
- /// Creates a new parser for the given format string
- pub fn new<'a>(s: &'a str) -> Parser<'a> {
- Parser {
- input: s,
- cur: s.char_indices(),
- depth: 0,
- errors: vec!(),
- }
- }
-
- /// Notifies of an error. The message doesn't actually need to be of type
- /// ~str, but I think it does when this eventually uses conditions so it
- /// might as well start using it now.
- fn err(&mut self, msg: &str) {
- self.errors.push(msg.to_owned());
- }
-
- /// Optionally consumes the specified character. If the character is not at
- /// the current position, then the current iterator isn't moved and false is
- /// returned, otherwise the character is consumed and true is returned.
- fn consume(&mut self, c: char) -> bool {
- match self.cur.clone().next() {
- Some((_, maybe)) if c == maybe => {
- self.cur.next();
- true
- }
- Some(..) | None => false,
- }
- }
-
- /// Forces consumption of the specified character. If the character is not
- /// found, an error is emitted.
- fn must_consume(&mut self, c: char) {
- self.ws();
- match self.cur.clone().next() {
- Some((_, maybe)) if c == maybe => {
- self.cur.next();
- }
- Some((_, other)) => {
- self.err(
- format!("expected `{}` but found `{}`", c, other));
- }
- None => {
- self.err(
- format!("expected `{}` but string was terminated", c));
- }
- }
- }
-
- /// Attempts to consume any amount of whitespace followed by a character
- fn wsconsume(&mut self, c: char) -> bool {
- self.ws(); self.consume(c)
- }
-
- /// Consumes all whitespace characters until the first non-whitespace
- /// character
- fn ws(&mut self) {
- loop {
- match self.cur.clone().next() {
- Some((_, c)) if char::is_whitespace(c) => { self.cur.next(); }
- Some(..) | None => { return }
- }
- }
- }
-
- /// Consumes an escape sequence, failing if there is not a valid character
- /// to be escaped.
- fn escape(&mut self) -> char {
- match self.cur.next() {
- Some((_, c @ '#')) | Some((_, c @ '{')) |
- Some((_, c @ '\\')) | Some((_, c @ '}')) => { c }
- Some((_, c)) => {
- self.err(format!("invalid escape character `{}`", c));
- c
- }
- None => {
- self.err("expected an escape sequence, but format string was \
- terminated");
- ' '
- }
- }
- }
-
- /// Parses all of a string which is to be considered a "raw literal" in a
- /// format string. This is everything outside of the braces.
- fn string(&mut self, start: uint) -> &'a str {
- loop {
- // we may not consume the character, so clone the iterator
- match self.cur.clone().next() {
- Some((pos, '\\')) | Some((pos, '#')) |
- Some((pos, '}')) | Some((pos, '{')) => {
- return self.input.slice(start, pos);
- }
- Some(..) => { self.cur.next(); }
- None => {
- self.cur.next();
- return self.input.slice(start, self.input.len());
- }
- }
- }
- }
-
- /// Parses an Argument structure, or what's contained within braces inside
- /// the format string
- fn argument(&mut self) -> Argument<'a> {
- Argument {
- position: self.position(),
- format: self.format(),
- method: self.method(),
- }
- }
-
- /// Parses a positional argument for a format. This could either be an
- /// integer index of an argument, a named argument, or a blank string.
- fn position(&mut self) -> Position<'a> {
- match self.integer() {
- Some(i) => { ArgumentIs(i) }
- None => {
- match self.cur.clone().next() {
- Some((_, c)) if char::is_alphabetic(c) => {
- ArgumentNamed(self.word())
- }
- _ => ArgumentNext
- }
- }
- }
- }
-
- /// Parses a format specifier at the current position, returning all of the
- /// relevant information in the FormatSpec struct.
- fn format(&mut self) -> FormatSpec<'a> {
- let mut spec = FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: self.input.slice(0, 0),
- };
- if !self.consume(':') { return spec }
-
- // fill character
- match self.cur.clone().next() {
- Some((_, c)) => {
- match self.cur.clone().skip(1).next() {
- Some((_, '>')) | Some((_, '<')) => {
- spec.fill = Some(c);
- self.cur.next();
- }
- Some(..) | None => {}
- }
- }
- None => {}
- }
- // Alignment
- if self.consume('<') {
- spec.align = AlignLeft;
- } else if self.consume('>') {
- spec.align = AlignRight;
- }
- // Sign flags
- if self.consume('+') {
- spec.flags |= 1 << (FlagSignPlus as uint);
- } else if self.consume('-') {
- spec.flags |= 1 << (FlagSignMinus as uint);
- }
- // Alternate marker
- if self.consume('#') {
- spec.flags |= 1 << (FlagAlternate as uint);
- }
- // Width and precision
- let mut havewidth = false;
- if self.consume('0') {
- // small ambiguity with '0$' as a format string. In theory this is a
- // '0' flag and then an ill-formatted format string with just a '$'
- // and no count, but this is better if we instead interpret this as
- // no '0' flag and '0$' as the width instead.
- if self.consume('$') {
- spec.width = CountIsParam(0);
- havewidth = true;
- } else {
- spec.flags |= 1 << (FlagSignAwareZeroPad as uint);
- }
- }
- if !havewidth {
- spec.width = self.count();
- }
- if self.consume('.') {
- if self.consume('*') {
- spec.precision = CountIsNextParam;
- } else {
- spec.precision = self.count();
- }
- }
- // Finally the actual format specifier
- if self.consume('?') {
- spec.ty = "?";
- } else {
- spec.ty = self.word();
- }
- return spec;
- }
-
- /// Parses a method to be applied to the previously specified argument and
- /// its format. The two current supported methods are 'plural' and 'select'
- fn method(&mut self) -> Option<Box<Method<'a>>> {
- if !self.wsconsume(',') {
- return None;
- }
- self.ws();
- match self.word() {
- "select" => {
- self.must_consume(',');
- Some(self.select())
- }
- "plural" => {
- self.must_consume(',');
- Some(self.plural())
- }
- "" => {
- self.err("expected method after comma");
- return None;
- }
- method => {
- self.err(format!("unknown method: `{}`", method));
- return None;
- }
- }
- }
-
- /// Parses a 'select' statement (after the initial 'select' word)
- fn select(&mut self) -> Box<Method<'a>> {
- let mut other = None;
- let mut arms = vec!();
- // Consume arms one at a time
- loop {
- self.ws();
- let selector = self.word();
- if selector == "" {
- self.err("cannot have an empty selector");
- break
- }
- self.must_consume('{');
- self.depth += 1;
- let pieces = self.collect();
- self.depth -= 1;
- self.must_consume('}');
- if selector == "other" {
- if !other.is_none() {
- self.err("multiple `other` statements in `select");
- }
- other = Some(pieces);
- } else {
- arms.push(SelectArm { selector: selector, result: pieces });
- }
- self.ws();
- match self.cur.clone().next() {
- Some((_, '}')) => { break }
- Some(..) | None => {}
- }
- }
- // The "other" selector must be present
- let other = match other {
- Some(arm) => { arm }
- None => {
- self.err("`select` statement must provide an `other` case");
- vec!()
- }
- };
- box Select(arms, other)
- }
-
- /// Parses a 'plural' statement (after the initial 'plural' word)
- fn plural(&mut self) -> Box<Method<'a>> {
- let mut offset = None;
- let mut other = None;
- let mut arms = vec!();
-
- // First, attempt to parse the 'offset:' field. We know the set of
- // selector words which can appear in plural arms, and the only ones
- // which start with 'o' are "other" and "offset", hence look two
- // characters deep to see if we can consume the word "offset"
- self.ws();
- let mut it = self.cur.clone();
- match it.next() {
- Some((_, 'o')) => {
- match it.next() {
- Some((_, 'f')) => {
- let word = self.word();
- if word != "offset" {
- self.err(format!("expected `offset`, found `{}`",
- word));
- } else {
- self.must_consume(':');
- match self.integer() {
- Some(i) => { offset = Some(i); }
- None => {
- self.err("offset must be an integer");
- }
- }
- }
- }
- Some(..) | None => {}
- }
- }
- Some(..) | None => {}
- }
-
- // Next, generate all the arms
- loop {
- let mut isother = false;
- let selector = if self.wsconsume('=') {
- match self.integer() {
- Some(i) => Literal(i),
- None => {
- self.err("plural `=` selectors must be followed by an \
- integer");
- Literal(0)
- }
- }
- } else {
- let word = self.word();
- match word {
- "other" => { isother = true; Keyword(Zero) }
- "zero" => Keyword(Zero),
- "one" => Keyword(One),
- "two" => Keyword(Two),
- "few" => Keyword(Few),
- "many" => Keyword(Many),
- word => {
- self.err(format!("unexpected plural selector `{}`",
- word));
- if word == "" {
- break
- } else {
- Keyword(Zero)
- }
- }
- }
- };
- self.must_consume('{');
- self.depth += 1;
- let pieces = self.collect();
- self.depth -= 1;
- self.must_consume('}');
- if isother {
- if !other.is_none() {
- self.err("multiple `other` statements in `select");
- }
- other = Some(pieces);
- } else {
- arms.push(PluralArm { selector: selector, result: pieces });
- }
- self.ws();
- match self.cur.clone().next() {
- Some((_, '}')) => { break }
- Some(..) | None => {}
- }
- }
-
- let other = match other {
- Some(arm) => { arm }
- None => {
- self.err("`plural` statement must provide an `other` case");
- vec!()
- }
- };
- box Plural(offset, arms, other)
- }
-
- /// Parses a Count parameter at the current position. This does not check
- /// for 'CountIsNextParam' because that is only used in precision, not
- /// width.
- fn count(&mut self) -> Count<'a> {
- match self.integer() {
- Some(i) => {
- if self.consume('$') {
- CountIsParam(i)
- } else {
- CountIs(i)
- }
- }
- None => {
- let tmp = self.cur.clone();
- match self.word() {
- word if word.len() > 0 && self.consume('$') => {
- CountIsName(word)
- }
- _ => {
- self.cur = tmp;
- CountImplied
- }
- }
- }
- }
- }
-
- /// Parses a word starting at the current position. A word is considered to
- /// be an alphabetic character followed by any number of alphanumeric
- /// characters.
- fn word(&mut self) -> &'a str {
- let start = match self.cur.clone().next() {
- Some((pos, c)) if char::is_XID_start(c) => {
- self.cur.next();
- pos
- }
- Some(..) | None => { return self.input.slice(0, 0); }
- };
- let mut end;
- loop {
- match self.cur.clone().next() {
- Some((_, c)) if char::is_XID_continue(c) => {
- self.cur.next();
- }
- Some((pos, _)) => { end = pos; break }
- None => { end = self.input.len(); break }
- }
- }
- self.input.slice(start, end)
- }
-
- /// Optionally parses an integer at the current position. This doesn't deal
- /// with overflow at all, it's just accumulating digits.
- fn integer(&mut self) -> Option<uint> {
- let mut cur = 0;
- let mut found = false;
- loop {
- match self.cur.clone().next() {
- Some((_, c)) => {
- match char::to_digit(c, 10) {
- Some(i) => {
- cur = cur * 10 + i;
- found = true;
- self.cur.next();
- }
- None => { break }
- }
- }
- None => { break }
- }
- }
- if found {
- return Some(cur);
- } else {
- return None;
- }
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use prelude::*;
-
- fn same(fmt: &'static str, p: &[Piece<'static>]) {
- let mut parser = Parser::new(fmt);
- assert!(p == parser.collect::<Vec<Piece<'static>>>().as_slice());
- }
-
- fn fmtdflt() -> FormatSpec<'static> {
- return FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "",
- }
- }
-
- fn musterr(s: &str) {
- let mut p = Parser::new(s);
- p.next();
- assert!(p.errors.len() != 0);
- }
-
- #[test]
- fn simple() {
- same("asdf", [String("asdf")]);
- same("a\\{b", [String("a"), String("{b")]);
- same("a\\#b", [String("a"), String("#b")]);
- same("a\\}b", [String("a"), String("}b")]);
- same("a\\}", [String("a"), String("}")]);
- same("\\}", [String("}")]);
- }
-
- #[test] fn invalid01() { musterr("{") }
- #[test] fn invalid02() { musterr("\\") }
- #[test] fn invalid03() { musterr("\\a") }
- #[test] fn invalid04() { musterr("{3a}") }
- #[test] fn invalid05() { musterr("{:|}") }
- #[test] fn invalid06() { musterr("{:>>>}") }
-
- #[test]
- fn format_nothing() {
- same("{}", [Argument(Argument {
- position: ArgumentNext,
- format: fmtdflt(),
- method: None,
- })]);
- }
- #[test]
- fn format_position() {
- same("{3}", [Argument(Argument {
- position: ArgumentIs(3),
- format: fmtdflt(),
- method: None,
- })]);
- }
- #[test]
- fn format_position_nothing_else() {
- same("{3:}", [Argument(Argument {
- position: ArgumentIs(3),
- format: fmtdflt(),
- method: None,
- })]);
- }
- #[test]
- fn format_type() {
- same("{3:a}", [Argument(Argument {
- position: ArgumentIs(3),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "a",
- },
- method: None,
- })]);
- }
- #[test]
- fn format_align_fill() {
- same("{3:>}", [Argument(Argument {
- position: ArgumentIs(3),
- format: FormatSpec {
- fill: None,
- align: AlignRight,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "",
- },
- method: None,
- })]);
- same("{3:0<}", [Argument(Argument {
- position: ArgumentIs(3),
- format: FormatSpec {
- fill: Some('0'),
- align: AlignLeft,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "",
- },
- method: None,
- })]);
- same("{3:*<abcd}", [Argument(Argument {
- position: ArgumentIs(3),
- format: FormatSpec {
- fill: Some('*'),
- align: AlignLeft,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "abcd",
- },
- method: None,
- })]);
- }
- #[test]
- fn format_counts() {
- same("{:10s}", [Argument(Argument {
- position: ArgumentNext,
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountImplied,
- width: CountIs(10),
- ty: "s",
- },
- method: None,
- })]);
- same("{:10$.10s}", [Argument(Argument {
- position: ArgumentNext,
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountIs(10),
- width: CountIsParam(10),
- ty: "s",
- },
- method: None,
- })]);
- same("{:.*s}", [Argument(Argument {
- position: ArgumentNext,
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountIsNextParam,
- width: CountImplied,
- ty: "s",
- },
- method: None,
- })]);
- same("{:.10$s}", [Argument(Argument {
- position: ArgumentNext,
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountIsParam(10),
- width: CountImplied,
- ty: "s",
- },
- method: None,
- })]);
- same("{:a$.b$s}", [Argument(Argument {
- position: ArgumentNext,
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountIsName("b"),
- width: CountIsName("a"),
- ty: "s",
- },
- method: None,
- })]);
- }
- #[test]
- fn format_flags() {
- same("{:-}", [Argument(Argument {
- position: ArgumentNext,
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: (1 << FlagSignMinus as uint),
- precision: CountImplied,
- width: CountImplied,
- ty: "",
- },
- method: None,
- })]);
- same("{:+#}", [Argument(Argument {
- position: ArgumentNext,
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: (1 << FlagSignPlus as uint) | (1 << FlagAlternate as uint),
- precision: CountImplied,
- width: CountImplied,
- ty: "",
- },
- method: None,
- })]);
- }
- #[test]
- fn format_mixture() {
- same("abcd {3:a} efg", [String("abcd "), Argument(Argument {
- position: ArgumentIs(3),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "a",
- },
- method: None,
- }), String(" efg")]);
- }
-
- #[test]
- fn select_simple() {
- same("{, select, other { haha } }", [Argument(Argument{
- position: ArgumentNext,
- format: fmtdflt(),
- method: Some(box Select(vec![], vec![String(" haha ")]))
- })]);
- same("{1, select, other { haha } }", [Argument(Argument{
- position: ArgumentIs(1),
- format: fmtdflt(),
- method: Some(box Select(vec![], vec![String(" haha ")]))
- })]);
- same("{1, select, other {#} }", [Argument(Argument{
- position: ArgumentIs(1),
- format: fmtdflt(),
- method: Some(box Select(vec![], vec![CurrentArgument]))
- })]);
- same("{1, select, other {{2, select, other {lol}}} }", [Argument(Argument{
- position: ArgumentIs(1),
- format: fmtdflt(),
- method: Some(box Select(vec![], vec![Argument(Argument{
- position: ArgumentIs(2),
- format: fmtdflt(),
- method: Some(box Select(vec![], vec![String("lol")]))
- })])) // wat
- })]);
- }
-
- #[test]
- fn select_cases() {
- same("{1, select, a{1} b{2} c{3} other{4} }", [Argument(Argument{
- position: ArgumentIs(1),
- format: fmtdflt(),
- method: Some(box Select(vec![
- SelectArm{ selector: "a", result: vec![String("1")] },
- SelectArm{ selector: "b", result: vec![String("2")] },
- SelectArm{ selector: "c", result: vec![String("3")] },
- ], vec![String("4")]))
- })]);
- }
-
- #[test] fn badselect01() { musterr("{select, }") }
- #[test] fn badselect02() { musterr("{1, select}") }
- #[test] fn badselect03() { musterr("{1, select, }") }
- #[test] fn badselect04() { musterr("{1, select, a {}}") }
- #[test] fn badselect05() { musterr("{1, select, other }}") }
- #[test] fn badselect06() { musterr("{1, select, other {}") }
- #[test] fn badselect07() { musterr("{select, other {}") }
- #[test] fn badselect08() { musterr("{1 select, other {}") }
- #[test] fn badselect09() { musterr("{:d select, other {}") }
- #[test] fn badselect10() { musterr("{1:d select, other {}") }
-
- #[test]
- fn plural_simple() {
- same("{, plural, other { haha } }", [Argument(Argument{
- position: ArgumentNext,
- format: fmtdflt(),
- method: Some(box Plural(None, vec![], vec![String(" haha ")]))
- })]);
- same("{:, plural, other { haha } }", [Argument(Argument{
- position: ArgumentNext,
- format: fmtdflt(),
- method: Some(box Plural(None, vec![], vec![String(" haha ")]))
- })]);
- same("{, plural, offset:1 =2{2} =3{3} many{yes} other{haha} }",
- [Argument(Argument{
- position: ArgumentNext,
- format: fmtdflt(),
- method: Some(box Plural(Some(1), vec![
- PluralArm{ selector: Literal(2), result: vec![String("2")] },
- PluralArm{ selector: Literal(3), result: vec![String("3")] },
- PluralArm{ selector: Keyword(Many), result: vec![String("yes")] }
- ], vec![String("haha")]))
- })]);
- }
-}
#![allow(missing_doc)]
#![doc(hidden)]
-use fmt::parse;
use option::Option;
+#[cfg(stage0)]
+pub use fmt::parse::{Alignment, AlignLeft, AlignRight, AlignUnknown};
+#[cfg(stage0)]
+pub use fmt::parse::{PluralKeyword, Zero, One, Two, Few, Many};
+#[cfg(stage0)]
+pub use fmt::parse::{Flag, FlagSignPlus, FlagSignMinus, FlagSignAwareZeroPad};
+#[cfg(stage0)]
+pub use fmt::parse::{FlagAlternate};
+
pub enum Piece<'a> {
String(&'a str),
// FIXME(#8259): this shouldn't require the unit-value here
pub struct FormatSpec {
pub fill: char,
- pub align: parse::Alignment,
+ pub align: Alignment,
pub flags: uint,
pub precision: Count,
pub width: Count,
}
+#[cfg(not(stage0))]
+#[deriving(Eq)]
+pub enum Alignment {
+ AlignLeft,
+ AlignRight,
+ AlignUnknown,
+}
+
pub enum Count {
CountIs(uint), CountIsParam(uint), CountIsNextParam, CountImplied,
}
ArgumentNext, ArgumentIs(uint)
}
+#[cfg(not(stage0))]
+pub enum Flag {
+ FlagSignPlus,
+ FlagSignMinus,
+ FlagAlternate,
+ FlagSignAwareZeroPad,
+}
+
pub enum Method<'a> {
Plural(Option<uint>, &'a [PluralArm<'a>], &'a [Piece<'a>]),
Select(&'a [SelectArm<'a>], &'a [Piece<'a>]),
}
pub enum PluralSelector {
- Keyword(parse::PluralKeyword),
+ Keyword(PluralKeyword),
Literal(uint),
}
+pub enum PluralKeyword {
+ Zero,
+ One,
+ Two,
+ Few,
+ Many,
+}
+
pub struct PluralArm<'a> {
pub selector: PluralSelector,
pub result: &'a [Piece<'a>],
let path = Path::new("message.txt");
let mut file = BufferedReader::new(File::open(&path));
- let lines: ~[~str] = file.lines().map(|x| x.unwrap()).collect();
+ let lines: Vec<~str> = file.lines().map(|x| x.unwrap()).collect();
```
* Make a simple TCP client connection and request
use io::net::ip::{SocketAddr, IpAddr};
use option::{Option, Some, None};
use rt::rtio::{IoFactory, LocalIo};
-use slice::OwnedVector;
+use vec::Vec;
/// Hints to the types of sockets that are desired when looking up hosts
pub enum SocketType {
/// Easy name resolution. Given a hostname, returns the list of IP addresses for
/// that hostname.
-pub fn get_host_addresses(host: &str) -> IoResult<~[IpAddr]> {
+pub fn get_host_addresses(host: &str) -> IoResult<Vec<IpAddr>> {
lookup(Some(host), None, None).map(|a| a.move_iter().map(|i| i.address.ip).collect())
}
/// FIXME: this is not public because the `Hint` structure is not ready for public
/// consumption just yet.
fn lookup(hostname: Option<&str>, servname: Option<&str>, hint: Option<Hint>)
- -> IoResult<~[Info]> {
+ -> IoResult<Vec<Info>> {
LocalIo::maybe_raise(|io| io.get_host_addresses(hostname, servname, hint))
}
///
/// For clarification on the semantics of interrupting a read and a write,
/// take a look at `set_read_timeout` and `set_write_timeout`.
+ #[experimental = "the timeout argument may change in type and value"]
pub fn set_timeout(&mut self, timeout_ms: Option<u64>) {
self.obj.set_timeout(timeout_ms)
}
/// action is taken. Otherwise, the read operation will be scheduled to
/// promptly return. If a timeout error is returned, then no data was read
/// during the timeout period.
+ #[experimental = "the timeout argument may change in type and value"]
pub fn set_read_timeout(&mut self, timeout_ms: Option<u64>) {
self.obj.set_read_timeout(timeout_ms)
}
/// does not know how many bytes were written as part of the timeout
/// operation. It may be the case that bytes continue to be written in an
/// asynchronous fashion after the call to write returns.
+ #[experimental = "the timeout argument may change in type and value"]
pub fn set_write_timeout(&mut self, timeout_ms: Option<u64>) {
self.obj.set_write_timeout(timeout_ms)
}
/// Sets the read/write timeout for this socket.
///
/// For more information, see `TcpStream::set_timeout`
+ #[experimental = "the timeout argument may change in type and value"]
pub fn set_timeout(&mut self, timeout_ms: Option<u64>) {
self.obj.set_timeout(timeout_ms)
}
/// Sets the read timeout for this socket.
///
/// For more information, see `TcpStream::set_timeout`
+ #[experimental = "the timeout argument may change in type and value"]
pub fn set_read_timeout(&mut self, timeout_ms: Option<u64>) {
self.obj.set_read_timeout(timeout_ms)
}
/// Sets the write timeout for this socket.
///
/// For more information, see `TcpStream::set_timeout`
+ #[experimental = "the timeout argument may change in type and value"]
pub fn set_write_timeout(&mut self, timeout_ms: Option<u64>) {
self.obj.set_write_timeout(timeout_ms)
}
/// Sets the read/write timeout for this socket.
///
/// For more information, see `TcpStream::set_timeout`
+ #[experimental = "the timeout argument may change in type and value"]
pub fn set_timeout(&mut self, timeout_ms: Option<u64>) {
self.obj.set_timeout(timeout_ms)
}
/// Sets the read timeout for this socket.
///
/// For more information, see `TcpStream::set_timeout`
+ #[experimental = "the timeout argument may change in type and value"]
pub fn set_read_timeout(&mut self, timeout_ms: Option<u64>) {
self.obj.set_read_timeout(timeout_ms)
}
/// Sets the write timeout for this socket.
///
/// For more information, see `TcpStream::set_timeout`
+ #[experimental = "the timeout argument may change in type and value"]
pub fn set_write_timeout(&mut self, timeout_ms: Option<u64>) {
self.obj.set_write_timeout(timeout_ms)
}
/// Extra I/O handles as configured by the original `ProcessConfig` when
/// this process was created. This is by default empty.
- pub extra_io: ~[Option<io::PipeStream>],
+ pub extra_io: Vec<Option<io::PipeStream>>,
}
/// This configuration describes how a new process should be spawned. A blank
drop(self.stdin.take());
drop(self.stdout.take());
drop(self.stderr.take());
- drop(mem::replace(&mut self.extra_io, box []));
+ drop(mem::replace(&mut self.extra_io, Vec::new()));
self.wait();
}
//! memory types, including [`atomics`](sync/atomics/index.html).
//!
//! Common types of I/O, including files, TCP, UDP, pipes, Unix domain sockets,
-//! timers, and process spawning, are defined in the [`io`](io/index.html).
+//! timers, and process spawning, are defined in the [`io`](io/index.html) module.
//!
//! Rust's I/O and concurrency depends on a small runtime interface
//! that lives, along with its support code, in mod [`rt`](rt/index.html).
//!
//! ## The Rust prelude and macros
//!
-//! Finally, the [`prelude`](prelude/index.html) defines a set of
+//! Finally, the [`prelude`](prelude/index.html) defines a
//! common set of traits, types, and functions that are made available
//! to all code by default. [`macros`](macros/index.html) contains
-//! all the standard macros, such as `assert!`, `fail!`, `println!`.
+//! all the standard macros, such as `assert!`, `fail!`, `println!`,
+//! and `format!`, also available to all Rust code.
#![crate_id = "std#0.11-pre"]
#![comment = "The Rust standard library"]
pub use ty;
pub use unstable;
pub use vec;
+
+ // The test runner requires std::slice::Vector, so re-export std::slice just for it.
+ #[cfg(test)] pub use slice;
}
//! Operations and constants for signed 16-bits integers (`i16` type)
use from_str::FromStr;
-use iter::Iterator;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
-use slice::{ImmutableVector, OwnedVector};
+use slice::ImmutableVector;
use str;
pub use core::i16::{BITS, BYTES, MIN, MAX};
//! Operations and constants for signed 32-bits integers (`i32` type)
use from_str::FromStr;
-use iter::Iterator;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
-use slice::{ImmutableVector, OwnedVector};
+use slice::ImmutableVector;
use str;
pub use core::i32::{BITS, BYTES, MIN, MAX};
//! Operations and constants for signed 64-bits integers (`i64` type)
use from_str::FromStr;
-use iter::Iterator;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
-use slice::{ImmutableVector, OwnedVector};
+use slice::ImmutableVector;
use str;
pub use core::i64::{BITS, BYTES, MIN, MAX};
//! Operations and constants for signed 8-bits integers (`i8` type)
use from_str::FromStr;
-use iter::Iterator;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
-use slice::{ImmutableVector, OwnedVector};
+use slice::ImmutableVector;
use str;
pub use core::i8::{BITS, BYTES, MIN, MAX};
//! Operations and constants for architecture-sized signed integers (`int` type)
use from_str::FromStr;
-use iter::Iterator;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
-use slice::{ImmutableVector, OwnedVector};
+use slice::ImmutableVector;
use str;
pub use core::int::{BITS, BYTES, MIN, MAX};
/// Convert to a string in a given base.
#[inline]
fn to_str_radix(&self, radix: uint) -> ~str {
+ use slice::Vector;
+ use str::StrAllocating;
+
let mut buf = ::vec::Vec::new();
strconv::int_to_str_bytes_common(*self, radix, strconv::SignNeg, |i| {
buf.push(i);
});
// We know we generated valid utf-8, so we don't need to go through that
// check.
- unsafe { str::raw::from_utf8_owned(buf.move_iter().collect()) }
+ unsafe { str::raw::from_utf8(buf.as_slice()).to_owned() }
}
}
use num;
use ops::{Add, Sub, Mul, Div, Rem, Neg};
use option::{None, Option, Some};
-use slice::OwnedVector;
use slice::{CloneableVector, ImmutableVector, MutableVector};
use std::cmp::{Ord, Eq};
-use str::{StrSlice};
-use str;
+use str::{StrAllocating, StrSlice};
+use strbuf::StrBuf;
use vec::Vec;
/// A flag that specifies whether to use exponential (scientific) notation.
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits, exp_format: ExponentFormat, exp_upper: bool
- ) -> (~[u8], bool) {
+ ) -> (Vec<u8>, bool) {
assert!(2 <= radix && radix <= 36);
match exp_format {
ExpDec if radix >= DIGIT_E_RADIX // decimal exponent 'e'
let _1: T = One::one();
match num.classify() {
- FPNaN => { return ("NaN".as_bytes().to_owned(), true); }
+ FPNaN => { return (Vec::from_slice("NaN".as_bytes()), true); }
FPInfinite if num > _0 => {
return match sign {
- SignAll => ("+inf".as_bytes().to_owned(), true),
- _ => ("inf".as_bytes().to_owned(), true)
+ SignAll => (Vec::from_slice("+inf".as_bytes()), true),
+ _ => (Vec::from_slice("inf".as_bytes()), true)
};
}
FPInfinite if num < _0 => {
return match sign {
- SignNone => ("inf".as_bytes().to_owned(), true),
- _ => ("-inf".as_bytes().to_owned(), true),
+ SignNone => (Vec::from_slice("inf".as_bytes()), true),
+ _ => (Vec::from_slice("-inf".as_bytes()), true),
};
}
_ => {}
}
}
- (buf.move_iter().collect(), false)
+ (buf, false)
}
/**
) -> (~str, bool) {
let (bytes, special) = float_to_str_bytes_common(num, radix,
negative_zero, sign, digits, exp_format, exp_capital);
- (str::from_utf8_owned(bytes).unwrap(), special)
+ (StrBuf::from_utf8(bytes).unwrap().into_owned(), special)
}
// Some constants for from_str_bytes_common's input validation,
//! Operations and constants for unsigned 16-bits integers (`u16` type)
use from_str::FromStr;
-use iter::Iterator;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
-use slice::{ImmutableVector, OwnedVector};
+use slice::ImmutableVector;
use str;
pub use core::u16::{BITS, BYTES, MIN, MAX};
//! Operations and constants for unsigned 32-bits integers (`u32` type)
use from_str::FromStr;
-use iter::Iterator;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
-use slice::{ImmutableVector, OwnedVector};
+use slice::ImmutableVector;
use str;
pub use core::u32::{BITS, BYTES, MIN, MAX};
//! Operations and constants for unsigned 64-bits integer (`u64` type)
use from_str::FromStr;
-use iter::Iterator;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
-use slice::{ImmutableVector, OwnedVector};
+use slice::ImmutableVector;
use str;
pub use core::u64::{BITS, BYTES, MIN, MAX};
//! Operations and constants for unsigned 8-bits integers (`u8` type)
use from_str::FromStr;
-use iter::Iterator;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
-use slice::{ImmutableVector, OwnedVector};
+use slice::ImmutableVector;
use str;
pub use core::u8::{BITS, BYTES, MIN, MAX};
//! Operations and constants for architecture-sized unsigned integers (`uint` type)
use from_str::FromStr;
-use iter::Iterator;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
-use slice::{ImmutableVector, OwnedVector};
+use slice::ImmutableVector;
use str;
pub use core::uint::{BITS, BYTES, MIN, MAX};
/// Convert to a string in a given base.
#[inline]
fn to_str_radix(&self, radix: uint) -> ~str {
+ use slice::Vector;
+ use str::StrAllocating;
+
let mut buf = ::vec::Vec::new();
strconv::int_to_str_bytes_common(*self, radix, strconv::SignNone, |i| {
buf.push(i);
});
// We know we generated valid utf-8, so we don't need to go through that
// check.
- unsafe { str::raw::from_utf8_owned(buf.move_iter().collect()) }
+ unsafe { str::raw::from_utf8(buf.as_slice()).to_owned() }
}
}
///
/// Invalid UTF-8 bytes are replaced with \uFFFD. See `str::from_utf8_lossy()`
/// for details.
-pub fn env() -> ~[(~str,~str)] {
+pub fn env() -> Vec<(~str,~str)> {
env_as_bytes().move_iter().map(|(k,v)| {
let k = str::from_utf8_lossy(k).into_owned();
let v = str::from_utf8_lossy(v).into_owned();
/// Returns a vector of (variable, value) byte-vector pairs for all the
/// environment variables of the current process.
-pub fn env_as_bytes() -> ~[(~[u8],~[u8])] {
+pub fn env_as_bytes() -> Vec<(~[u8],~[u8])> {
unsafe {
#[cfg(windows)]
unsafe fn get_env_pairs() -> Vec<~[u8]> {
fn env_convert(input: Vec<~[u8]>) -> Vec<(~[u8], ~[u8])> {
let mut pairs = Vec::new();
for p in input.iter() {
- let vs: ~[&[u8]] = p.splitn(1, |b| *b == '=' as u8).collect();
- let key = vs[0].to_owned();
- let val = if vs.len() < 2 { box [] } else { vs[1].to_owned() };
+ let mut it = p.splitn(1, |b| *b == '=' as u8);
+ let key = it.next().unwrap().to_owned();
+ let val = it.next().unwrap_or(&[]).to_owned();
pairs.push((key, val));
}
pairs
}
with_env_lock(|| {
let unparsed_environ = get_env_pairs();
- env_convert(unparsed_environ).move_iter().collect()
+ env_convert(unparsed_environ)
})
}
}
pub fn self_exe_name() -> Option<Path> {
#[cfg(target_os = "freebsd")]
- fn load_self() -> Option<~[u8]> {
+ fn load_self() -> Option<Vec<u8>> {
unsafe {
use libc::funcs::bsd44::*;
use libc::consts::os::extra::*;
if err != 0 { return None; }
if sz == 0 { return None; }
v.set_len(sz as uint - 1); // chop off trailing NUL
- Some(v.move_iter().collect())
+ Some(v)
}
}
#[cfg(target_os = "linux")]
#[cfg(target_os = "android")]
- fn load_self() -> Option<~[u8]> {
+ fn load_self() -> Option<Vec<u8>> {
use std::io;
match io::fs::readlink(&Path::new("/proc/self/exe")) {
- Ok(path) => Some(path.as_vec().to_owned()),
+ Ok(path) => Some(path.into_vec()),
Err(..) => None
}
}
#[cfg(target_os = "macos")]
- fn load_self() -> Option<~[u8]> {
+ fn load_self() -> Option<Vec<u8>> {
unsafe {
use libc::funcs::extra::_NSGetExecutablePath;
let mut sz: u32 = 0;
let err = _NSGetExecutablePath(v.as_mut_ptr() as *mut i8, &mut sz);
if err != 0 { return None; }
v.set_len(sz as uint - 1); // chop off trailing NUL
- Some(v.move_iter().collect())
+ Some(v)
}
}
#[cfg(windows)]
- fn load_self() -> Option<~[u8]> {
+ fn load_self() -> Option<Vec<u8>> {
use str::OwnedStr;
unsafe {
use os::win32::fill_utf16_buf_and_decode;
fill_utf16_buf_and_decode(|buf, sz| {
libc::GetModuleFileNameW(0u as libc::DWORD, buf, sz)
- }).map(|s| s.into_bytes())
+ }).map(|s| s.into_strbuf().into_bytes())
}
}
}
#[cfg(target_os = "macos")]
-unsafe fn load_argc_and_argv(argc: int, argv: **c_char) -> ~[~[u8]] {
+unsafe fn load_argc_and_argv(argc: int, argv: **c_char) -> Vec<~[u8]> {
use c_str::CString;
Vec::from_fn(argc as uint, |i| {
CString::new(*argv.offset(i as int), false).as_bytes_no_nul().to_owned()
- }).move_iter().collect()
+ })
}
/**
* Returns a list of the command line arguments.
*/
#[cfg(target_os = "macos")]
-fn real_args_as_bytes() -> ~[~[u8]] {
+fn real_args_as_bytes() -> Vec<~[u8]> {
unsafe {
let (argc, argv) = (*_NSGetArgc() as int,
*_NSGetArgv() as **c_char);
#[cfg(target_os = "linux")]
#[cfg(target_os = "android")]
#[cfg(target_os = "freebsd")]
-fn real_args_as_bytes() -> ~[~[u8]] {
+fn real_args_as_bytes() -> Vec<~[u8]> {
use rt;
match rt::args::clone() {
}
#[cfg(not(windows))]
-fn real_args() -> ~[~str] {
+fn real_args() -> Vec<~str> {
real_args_as_bytes().move_iter().map(|v| str::from_utf8_lossy(v).into_owned()).collect()
}
#[cfg(windows)]
-fn real_args() -> ~[~str] {
+fn real_args() -> Vec<~str> {
use slice;
use option::Expect;
LocalFree(szArgList as *c_void);
}
- return args.move_iter().collect();
+ return args
}
#[cfg(windows)]
-fn real_args_as_bytes() -> ~[~[u8]] {
+fn real_args_as_bytes() -> Vec<~[u8]> {
real_args().move_iter().map(|s| s.into_bytes()).collect()
}
///
/// The arguments are interpreted as utf-8, with invalid bytes replaced with \uFFFD.
/// See `str::from_utf8_lossy` for details.
-pub fn args() -> ~[~str] {
+pub fn args() -> Vec<~str> {
real_args()
}
/// Returns the arguments which this program was started with (normally passed
/// via the command line) as byte vectors.
-pub fn args_as_bytes() -> ~[~[u8]] {
+pub fn args_as_bytes() -> Vec<~[u8]> {
real_args_as_bytes()
}
//! FIXME #7756: This has a lot of C glue for lack of globals.
use option::Option;
+use vec::Vec;
#[cfg(test)] use option::{Some, None};
#[cfg(test)] use realstd;
#[cfg(test)] use realargs = realstd::rt::args;
#[cfg(test)] pub unsafe fn cleanup() { realargs::cleanup() }
/// Take the global arguments from global storage.
-#[cfg(not(test))] pub fn take() -> Option<~[~[u8]]> { imp::take() }
-#[cfg(test)] pub fn take() -> Option<~[~[u8]]> {
+#[cfg(not(test))] pub fn take() -> Option<Vec<~[u8]>> { imp::take() }
+#[cfg(test)] pub fn take() -> Option<Vec<~[u8]>> {
match realargs::take() {
- realstd::option::Some(a) => Some(a),
+ realstd::option::Some(v) => Some(unsafe{ ::cast::transmute(v) }),
realstd::option::None => None,
}
}
/// Give the global arguments to global storage.
///
/// It is an error if the arguments already exist.
-#[cfg(not(test))] pub fn put(args: ~[~[u8]]) { imp::put(args) }
-#[cfg(test)] pub fn put(args: ~[~[u8]]) { realargs::put(args) }
+#[cfg(not(test))] pub fn put(args: Vec<~[u8]>) { imp::put(args) }
+#[cfg(test)] pub fn put(args: Vec<~[u8]>) { realargs::put(unsafe { ::cast::transmute(args) }) }
/// Make a clone of the global arguments.
-#[cfg(not(test))] pub fn clone() -> Option<~[~[u8]]> { imp::clone() }
-#[cfg(test)] pub fn clone() -> Option<~[~[u8]]> {
+#[cfg(not(test))] pub fn clone() -> Option<Vec<~[u8]>> { imp::clone() }
+#[cfg(test)] pub fn clone() -> Option<Vec<~[u8]>> {
match realargs::clone() {
- realstd::option::Some(a) => Some(a),
+ realstd::option::Some(v) => Some(unsafe { ::cast::transmute(v) }),
realstd::option::None => None,
}
}
use owned::Box;
use unstable::mutex::{StaticNativeMutex, NATIVE_MUTEX_INIT};
use mem;
+ use vec::Vec;
#[cfg(not(test))] use ptr::RawPtr;
static mut global_args_ptr: uint = 0;
lock.destroy();
}
- pub fn take() -> Option<~[~[u8]]> {
+ pub fn take() -> Option<Vec<~[u8]>> {
with_lock(|| unsafe {
let ptr = get_global_ptr();
let val = mem::replace(&mut *ptr, None);
- val.as_ref().map(|s: &Box<~[~[u8]]>| (**s).clone())
+ val.as_ref().map(|s: &Box<Vec<~[u8]>>| (**s).clone())
})
}
- pub fn put(args: ~[~[u8]]) {
+ pub fn put(args: Vec<~[u8]>) {
with_lock(|| unsafe {
let ptr = get_global_ptr();
rtassert!((*ptr).is_none());
})
}
- pub fn clone() -> Option<~[~[u8]]> {
+ pub fn clone() -> Option<Vec<~[u8]>> {
with_lock(|| unsafe {
let ptr = get_global_ptr();
- (*ptr).as_ref().map(|s: &Box<~[~[u8]]>| (**s).clone())
+ (*ptr).as_ref().map(|s: &Box<Vec<~[u8]>>| (**s).clone())
})
}
}
}
- fn get_global_ptr() -> *mut Option<Box<~[~[u8]]>> {
+ fn get_global_ptr() -> *mut Option<Box<Vec<~[u8]>>> {
unsafe { cast::transmute(&global_args_ptr) }
}
// Copied from `os`.
#[cfg(not(test))]
- unsafe fn load_argc_and_argv(argc: int, argv: **u8) -> ~[~[u8]] {
+ unsafe fn load_argc_and_argv(argc: int, argv: **u8) -> Vec<~[u8]> {
use c_str::CString;
use ptr::RawPtr;
use libc;
Vec::from_fn(argc as uint, |i| {
let cs = CString::new(*(argv as **libc::c_char).offset(i as int), false);
cs.as_bytes_no_nul().to_owned()
- }).move_iter().collect()
+ })
}
#[cfg(test)]
// Preserve the actual global state.
let saved_value = take();
- let expected = box [bytes!("happy").to_owned(), bytes!("today?").to_owned()];
+ let expected = vec![bytes!("happy").to_owned(), bytes!("today?").to_owned()];
put(expected.clone());
assert!(clone() == Some(expected.clone()));
#[cfg(target_os = "win32", not(test))]
mod imp {
use option::Option;
+ use vec::Vec;
pub unsafe fn init(_argc: int, _argv: **u8) {
}
pub fn cleanup() {
}
- pub fn take() -> Option<~[~[u8]]> {
+ pub fn take() -> Option<Vec<~[u8]>> {
fail!()
}
- pub fn put(_args: ~[~[u8]]) {
+ pub fn put(_args: Vec<~[u8]>) {
fail!()
}
- pub fn clone() -> Option<~[~[u8]]> {
+ pub fn clone() -> Option<Vec<~[u8]>> {
fail!()
}
}
fn unix_connect(&mut self, path: &CString,
timeout: Option<u64>) -> IoResult<Box<RtioPipe:Send>>;
fn get_host_addresses(&mut self, host: Option<&str>, servname: Option<&str>,
- hint: Option<ai::Hint>) -> IoResult<~[ai::Info]>;
+ hint: Option<ai::Hint>) -> IoResult<Vec<ai::Info>>;
// filesystem operations
fn fs_from_raw_fd(&mut self, fd: c_int, close: CloseBehavior)
fn timer_init(&mut self) -> IoResult<Box<RtioTimer:Send>>;
fn spawn(&mut self, config: ProcessConfig)
-> IoResult<(Box<RtioProcess:Send>,
- ~[Option<Box<RtioPipe:Send>>])>;
+ Vec<Option<Box<RtioPipe:Send>>>)>;
fn kill(&mut self, pid: libc::pid_t, signal: int) -> IoResult<()>;
fn pipe_open(&mut self, fd: c_int) -> IoResult<Box<RtioPipe:Send>>;
fn tty_open(&mut self, fd: c_int, readable: bool)
// FIXME #5898: calling these .concat and .connect conflicts with
// StrVector::con{cat,nect}, since they have generic contents.
/// Flattens a vector of vectors of T into a single vector of T.
- fn concat_vec(&self) -> ~[T];
+ fn concat_vec(&self) -> Vec<T>;
/// Concatenate a vector of vectors, placing a given separator between each.
- fn connect_vec(&self, sep: &T) -> ~[T];
+ fn connect_vec(&self, sep: &T) -> Vec<T>;
}
impl<'a, T: Clone, V: Vector<T>> VectorVector<T> for &'a [V] {
- fn concat_vec(&self) -> ~[T] {
+ fn concat_vec(&self) -> Vec<T> {
let size = self.iter().fold(0u, |acc, v| acc + v.as_slice().len());
let mut result = Vec::with_capacity(size);
for v in self.iter() {
result.push_all(v.as_slice())
}
- result.move_iter().collect()
+ result
}
- fn connect_vec(&self, sep: &T) -> ~[T] {
+ fn connect_vec(&self, sep: &T) -> Vec<T> {
let size = self.iter().fold(0u, |acc, v| acc + v.as_slice().len());
let mut result = Vec::with_capacity(size + self.len());
let mut first = true;
if first { first = false } else { result.push(sep.clone()) }
result.push_all(v.as_slice())
}
- result.move_iter().collect()
+ result
}
}
-/**
- * Convert an iterator of pairs into a pair of vectors.
- *
- * Returns a tuple containing two vectors where the i-th element of the first
- * vector contains the first element of the i-th tuple of the input iterator,
- * and the i-th element of the second vector contains the second element
- * of the i-th tuple of the input iterator.
- */
-pub fn unzip<T, U, V: Iterator<(T, U)>>(mut iter: V) -> (~[T], ~[U]) {
- let (lo, _) = iter.size_hint();
- let mut ts = Vec::with_capacity(lo);
- let mut us = Vec::with_capacity(lo);
- for (t, u) in iter {
- ts.push(t);
- us.push(u);
- }
- (ts.move_iter().collect(), us.move_iter().collect())
-}
-
/// An Iterator that yields the element swaps needed to produce
/// a sequence of all possible permutations for an indexed sequence of
/// elements. Each permutation is only a single swap apart.
/// The last generated swap is always (0, 1), and it returns the
/// sequence to its initial order.
pub struct ElementSwaps {
- sdir: ~[SizeDirection],
+ sdir: Vec<SizeDirection>,
/// If true, emit the last swap that returns the sequence to initial state
emit_reset: bool,
swaps_made : uint,
// element (equal to the original index).
ElementSwaps{
emit_reset: true,
- sdir: range(0, length)
- .map(|i| SizeDirection{ size: i, dir: Neg })
- .collect::<~[_]>(),
+ sdir: range(0, length).map(|i| SizeDirection{ size: i, dir: Neg }).collect(),
swaps_made: 0
}
}
let max = self.sdir.iter().map(|&x| x).enumerate()
.filter(|&(i, sd)|
new_pos(i, sd.dir) < self.sdir.len() &&
- self.sdir[new_pos(i, sd.dir)].size < sd.size)
+ self.sdir.get(new_pos(i, sd.dir)).size < sd.size)
.max_by(|&(_, sd)| sd.size);
match max {
Some((i, sd)) => {
let j = new_pos(i, sd.dir);
- self.sdir.swap(i, j);
+ self.sdir.as_mut_slice().swap(i, j);
// Swap the direction of each larger SizeDirection
for x in self.sdir.mut_iter() {
/// Returns a copy of `v`.
#[inline]
fn to_owned(&self) -> ~[T] {
+ use RawVec = core::raw::Vec;
+ use rt::global_heap::{malloc_raw, exchange_free};
+ use num::{CheckedAdd, CheckedMul};
+ use option::Expect;
+
let len = self.len();
- let mut result = Vec::with_capacity(len);
- // Unsafe code so this can be optimised to a memcpy (or something
- // similarly fast) when T is Copy. LLVM is easily confused, so any
- // extra operations during the loop can prevent this optimisation
+ let data_size = len.checked_mul(&mem::size_of::<T>());
+ let data_size = data_size.expect("overflow in to_owned()");
+ let size = mem::size_of::<RawVec<()>>().checked_add(&data_size);
+ let size = size.expect("overflow in to_owned()");
+
unsafe {
+ let ret = malloc_raw(size) as *mut RawVec<()>;
+
+ (*ret).fill = len * mem::nonzero_size_of::<T>();
+ (*ret).alloc = len * mem::nonzero_size_of::<T>();
+
+ // Be careful with the following loop. We want it to be optimized
+ // to a memcpy (or something similarly fast) when T is Copy. LLVM
+ // is easily confused, so any extra operations during the loop can
+ // prevent this optimization.
let mut i = 0;
- let p = result.as_mut_ptr();
- // Use try_finally here otherwise the write to length
- // inside the loop stops LLVM from optimising this.
+ let p = &mut (*ret).data as *mut _ as *mut T;
try_finally(
&mut i, (),
|i, ()| while *i < len {
self.unsafe_ref(*i).clone());
*i += 1;
},
- |i| result.set_len(*i));
+ |i| if *i < len {
+ // we must be failing, clean up after ourselves
+ for j in range(0, *i as int) {
+ ptr::read(&*p.offset(j));
+ }
+ exchange_free(ret as *u8);
+ });
+ cast::transmute(ret)
}
- result.move_iter().collect()
}
#[inline(always)]
pub trait ImmutableCloneableVector<T> {
/// Partitions the vector into two vectors `(A,B)`, where all
/// elements of `A` satisfy `f` and all elements of `B` do not.
- fn partitioned(&self, f: |&T| -> bool) -> (~[T], ~[T]);
+ fn partitioned(&self, f: |&T| -> bool) -> (Vec<T>, Vec<T>);
/// Create an iterator that yields every possible permutation of the
/// vector in succession.
impl<'a,T:Clone> ImmutableCloneableVector<T> for &'a [T] {
#[inline]
- fn partitioned(&self, f: |&T| -> bool) -> (~[T], ~[T]) {
+ fn partitioned(&self, f: |&T| -> bool) -> (Vec<T>, Vec<T>) {
let mut lefts = Vec::new();
let mut rights = Vec::new();
}
}
- (lefts.move_iter().collect(), rights.move_iter().collect())
+ (lefts, rights)
}
fn permutations(self) -> Permutations<T> {
* Partitions the vector into two vectors `(A,B)`, where all
* elements of `A` satisfy `f` and all elements of `B` do not.
*/
- fn partition(self, f: |&T| -> bool) -> (~[T], ~[T]);
+ fn partition(self, f: |&T| -> bool) -> (Vec<T>, Vec<T>);
}
impl<T> OwnedVector<T> for ~[T] {
}
#[inline]
- fn partition(self, f: |&T| -> bool) -> (~[T], ~[T]) {
+ fn partition(self, f: |&T| -> bool) -> (Vec<T>, Vec<T>) {
let mut lefts = Vec::new();
let mut rights = Vec::new();
}
}
- (lefts.move_iter().collect(), rights.move_iter().collect())
+ (lefts, rights)
}
}
}
}
-/**
-* Constructs a vector from an unsafe pointer to a buffer
-*
-* # Arguments
-*
-* * ptr - An unsafe pointer to a buffer of `T`
-* * elts - The number of elements in the buffer
-*/
-// Wrapper for fn in raw: needs to be called by net_tcp::on_tcp_read_cb
-pub unsafe fn from_buf<T>(ptr: *T, elts: uint) -> ~[T] {
- raw::from_buf_raw(ptr, elts)
-}
-
/// Unsafe operations
pub mod raw {
- use iter::Iterator;
- use ptr;
- use slice::{MutableVector, OwnedVector};
- use vec::Vec;
-
pub use core::slice::raw::{buf_as_slice, mut_buf_as_slice};
pub use core::slice::raw::{shift_ptr, pop_ptr};
-
- /**
- * Constructs a vector from an unsafe pointer to a buffer
- *
- * # Arguments
- *
- * * ptr - An unsafe pointer to a buffer of `T`
- * * elts - The number of elements in the buffer
- */
- // Was in raw, but needs to be called by net_tcp::on_tcp_read_cb
- #[inline]
- pub unsafe fn from_buf_raw<T>(ptr: *T, elts: uint) -> ~[T] {
- let mut dst = Vec::with_capacity(elts);
- dst.set_len(elts);
- ptr::copy_memory(dst.as_mut_ptr(), ptr, elts);
- dst.move_iter().collect()
- }
}
/// An iterator that moves out of a vector.
fn is_odd(n: &uint) -> bool { *n % 2u == 1u }
- #[test]
- fn test_unsafe_ptrs() {
- unsafe {
- // Test on-stack copy-from-buf.
- let a = box [1, 2, 3];
- let mut ptr = a.as_ptr();
- let b = from_buf(ptr, 3u);
- assert_eq!(b.len(), 3u);
- assert_eq!(b[0], 1);
- assert_eq!(b[1], 2);
- assert_eq!(b[2], 3);
-
- // Test on-heap copy-from-buf.
- let c = box [1, 2, 3, 4, 5];
- ptr = c.as_ptr();
- let d = from_buf(ptr, 5u);
- assert_eq!(d.len(), 5u);
- assert_eq!(d[0], 1);
- assert_eq!(d[1], 2);
- assert_eq!(d[2], 3);
- assert_eq!(d[3], 4);
- assert_eq!(d[4], 5);
- }
- }
-
#[test]
fn test_from_fn() {
// Test on-stack from_fn.
assert_eq!(v, vec![1, 3, 5]);
}
- #[test]
- fn test_zip_unzip() {
- let z1 = vec![(1, 4), (2, 5), (3, 6)];
-
- let (left, right) = unzip(z1.iter().map(|&x| x));
-
- assert_eq!((1, 4), (left[0], right[0]));
- assert_eq!((2, 5), (left[1], right[1]));
- assert_eq!((3, 6), (left[2], right[2]));
- }
-
#[test]
fn test_element_swaps() {
let mut v = [1, 2, 3];
let n = task_rng().gen::<uint>() % 10;
counts[n] += 1;
(n, counts[n])
- }).collect::<~[(uint, int)]>();
+ }).collect::<Vec<(uint, int)>>();
// only sort on the first element, so an unstable sort
// may mix up the counts.
// will need to be ordered with increasing
// counts... i.e. exactly asserting that this sort is
// stable.
- assert!(v.windows(2).all(|w| w[0] <= w[1]));
+ assert!(v.as_slice().windows(2).all(|w| w[0] <= w[1]));
}
}
}
#[test]
fn test_partition() {
- assert_eq!((box []).partition(|x: &int| *x < 3), (box [], box []));
- assert_eq!((box [1, 2, 3]).partition(|x: &int| *x < 4), (box [1, 2, 3], box []));
- assert_eq!((box [1, 2, 3]).partition(|x: &int| *x < 2), (box [1], box [2, 3]));
- assert_eq!((box [1, 2, 3]).partition(|x: &int| *x < 0), (box [], box [1, 2, 3]));
+ assert_eq!((box []).partition(|x: &int| *x < 3), (vec![], vec![]));
+ assert_eq!((box [1, 2, 3]).partition(|x: &int| *x < 4), (vec![1, 2, 3], vec![]));
+ assert_eq!((box [1, 2, 3]).partition(|x: &int| *x < 2), (vec![1], vec![2, 3]));
+ assert_eq!((box [1, 2, 3]).partition(|x: &int| *x < 0), (vec![], vec![1, 2, 3]));
}
#[test]
fn test_partitioned() {
- assert_eq!(([]).partitioned(|x: &int| *x < 3), (box [], box []))
- assert_eq!(([1, 2, 3]).partitioned(|x: &int| *x < 4), (box [1, 2, 3], box []));
- assert_eq!(([1, 2, 3]).partitioned(|x: &int| *x < 2), (box [1], box [2, 3]));
- assert_eq!(([1, 2, 3]).partitioned(|x: &int| *x < 0), (box [], box [1, 2, 3]));
+ assert_eq!(([]).partitioned(|x: &int| *x < 3), (vec![], vec![]));
+ assert_eq!(([1, 2, 3]).partitioned(|x: &int| *x < 4), (vec![1, 2, 3], vec![]));
+ assert_eq!(([1, 2, 3]).partitioned(|x: &int| *x < 2), (vec![1], vec![2, 3]));
+ assert_eq!(([1, 2, 3]).partitioned(|x: &int| *x < 0), (vec![], vec![1, 2, 3]));
}
#[test]
fn test_concat() {
let v: [~[int], ..0] = [];
- assert_eq!(v.concat_vec(), box []);
- assert_eq!([box [1], box [2,3]].concat_vec(), box [1, 2, 3]);
+ assert_eq!(v.concat_vec(), vec![]);
+ assert_eq!([box [1], box [2,3]].concat_vec(), vec![1, 2, 3]);
- assert_eq!([&[1], &[2,3]].concat_vec(), box [1, 2, 3]);
+ assert_eq!([&[1], &[2,3]].concat_vec(), vec![1, 2, 3]);
}
#[test]
fn test_connect() {
let v: [~[int], ..0] = [];
- assert_eq!(v.connect_vec(&0), box []);
- assert_eq!([box [1], box [2, 3]].connect_vec(&0), box [1, 0, 2, 3]);
- assert_eq!([box [1], box [2], box [3]].connect_vec(&0), box [1, 0, 2, 0, 3]);
+ assert_eq!(v.connect_vec(&0), vec![]);
+ assert_eq!([box [1], box [2, 3]].connect_vec(&0), vec![1, 0, 2, 3]);
+ assert_eq!([box [1], box [2], box [3]].connect_vec(&0), vec![1, 0, 2, 0, 3]);
- assert_eq!(v.connect_vec(&0), box []);
- assert_eq!([&[1], &[2, 3]].connect_vec(&0), box [1, 0, 2, 3]);
- assert_eq!([&[1], &[2], &[3]].connect_vec(&0), box [1, 0, 2, 0, 3]);
+ assert_eq!([&[1], &[2, 3]].connect_vec(&0), vec![1, 0, 2, 3]);
+ assert_eq!([&[1], &[2], &[3]].connect_vec(&0), vec![1, 0, 2, 0, 3]);
}
#[test]
fn test_splitator() {
let xs = &[1i,2,3,4,5];
- assert_eq!(xs.split(|x| *x % 2 == 0).collect::<~[&[int]]>(),
- box [&[1], &[3], &[5]]);
- assert_eq!(xs.split(|x| *x == 1).collect::<~[&[int]]>(),
- box [&[], &[2,3,4,5]]);
- assert_eq!(xs.split(|x| *x == 5).collect::<~[&[int]]>(),
- box [&[1,2,3,4], &[]]);
- assert_eq!(xs.split(|x| *x == 10).collect::<~[&[int]]>(),
- box [&[1,2,3,4,5]]);
- assert_eq!(xs.split(|_| true).collect::<~[&[int]]>(),
- box [&[], &[], &[], &[], &[], &[]]);
+ assert_eq!(xs.split(|x| *x % 2 == 0).collect::<Vec<&[int]>>().as_slice(),
+ &[&[1], &[3], &[5]]);
+ assert_eq!(xs.split(|x| *x == 1).collect::<Vec<&[int]>>().as_slice(),
+ &[&[], &[2,3,4,5]]);
+ assert_eq!(xs.split(|x| *x == 5).collect::<Vec<&[int]>>().as_slice(),
+ &[&[1,2,3,4], &[]]);
+ assert_eq!(xs.split(|x| *x == 10).collect::<Vec<&[int]>>().as_slice(),
+ &[&[1,2,3,4,5]]);
+ assert_eq!(xs.split(|_| true).collect::<Vec<&[int]>>().as_slice(),
+ &[&[], &[], &[], &[], &[], &[]]);
let xs: &[int] = &[];
- assert_eq!(xs.split(|x| *x == 5).collect::<~[&[int]]>(), box [&[]]);
+ assert_eq!(xs.split(|x| *x == 5).collect::<Vec<&[int]>>().as_slice(), &[&[]]);
}
#[test]
fn test_splitnator() {
let xs = &[1i,2,3,4,5];
- assert_eq!(xs.splitn(0, |x| *x % 2 == 0).collect::<~[&[int]]>(),
- box [&[1,2,3,4,5]]);
- assert_eq!(xs.splitn(1, |x| *x % 2 == 0).collect::<~[&[int]]>(),
- box [&[1], &[3,4,5]]);
- assert_eq!(xs.splitn(3, |_| true).collect::<~[&[int]]>(),
- box [&[], &[], &[], &[4,5]]);
+ assert_eq!(xs.splitn(0, |x| *x % 2 == 0).collect::<Vec<&[int]>>().as_slice(),
+ &[&[1,2,3,4,5]]);
+ assert_eq!(xs.splitn(1, |x| *x % 2 == 0).collect::<Vec<&[int]>>().as_slice(),
+ &[&[1], &[3,4,5]]);
+ assert_eq!(xs.splitn(3, |_| true).collect::<Vec<&[int]>>().as_slice(),
+ &[&[], &[], &[], &[4,5]]);
let xs: &[int] = &[];
- assert_eq!(xs.splitn(1, |x| *x == 5).collect::<~[&[int]]>(), box [&[]]);
+ assert_eq!(xs.splitn(1, |x| *x == 5).collect::<Vec<&[int]>>().as_slice(), &[&[]]);
}
#[test]
fn test_rsplitator() {
let xs = &[1i,2,3,4,5];
- assert_eq!(xs.split(|x| *x % 2 == 0).rev().collect::<~[&[int]]>(),
- box [&[5], &[3], &[1]]);
- assert_eq!(xs.split(|x| *x == 1).rev().collect::<~[&[int]]>(),
- box [&[2,3,4,5], &[]]);
- assert_eq!(xs.split(|x| *x == 5).rev().collect::<~[&[int]]>(),
- box [&[], &[1,2,3,4]]);
- assert_eq!(xs.split(|x| *x == 10).rev().collect::<~[&[int]]>(),
- box [&[1,2,3,4,5]]);
+ assert_eq!(xs.split(|x| *x % 2 == 0).rev().collect::<Vec<&[int]>>().as_slice(),
+ &[&[5], &[3], &[1]]);
+ assert_eq!(xs.split(|x| *x == 1).rev().collect::<Vec<&[int]>>().as_slice(),
+ &[&[2,3,4,5], &[]]);
+ assert_eq!(xs.split(|x| *x == 5).rev().collect::<Vec<&[int]>>().as_slice(),
+ &[&[], &[1,2,3,4]]);
+ assert_eq!(xs.split(|x| *x == 10).rev().collect::<Vec<&[int]>>().as_slice(),
+ &[&[1,2,3,4,5]]);
let xs: &[int] = &[];
- assert_eq!(xs.split(|x| *x == 5).rev().collect::<~[&[int]]>(), box [&[]]);
+ assert_eq!(xs.split(|x| *x == 5).rev().collect::<Vec<&[int]>>().as_slice(), &[&[]]);
}
#[test]
fn test_rsplitnator() {
let xs = &[1,2,3,4,5];
- assert_eq!(xs.rsplitn(0, |x| *x % 2 == 0).collect::<~[&[int]]>(),
- box [&[1,2,3,4,5]]);
- assert_eq!(xs.rsplitn(1, |x| *x % 2 == 0).collect::<~[&[int]]>(),
- box [&[5], &[1,2,3]]);
- assert_eq!(xs.rsplitn(3, |_| true).collect::<~[&[int]]>(),
- box [&[], &[], &[], &[1,2]]);
+ assert_eq!(xs.rsplitn(0, |x| *x % 2 == 0).collect::<Vec<&[int]>>().as_slice(),
+ &[&[1,2,3,4,5]]);
+ assert_eq!(xs.rsplitn(1, |x| *x % 2 == 0).collect::<Vec<&[int]>>().as_slice(),
+ &[&[5], &[1,2,3]]);
+ assert_eq!(xs.rsplitn(3, |_| true).collect::<Vec<&[int]>>().as_slice(),
+ &[&[], &[], &[], &[1,2]]);
let xs: &[int] = &[];
- assert_eq!(xs.rsplitn(1, |x| *x == 5).collect::<~[&[int]]>(), box [&[]]);
+ assert_eq!(xs.rsplitn(1, |x| *x == 5).collect::<Vec<&[int]>>().as_slice(), &[&[]]);
}
#[test]
fn test_windowsator() {
let v = &[1i,2,3,4];
- assert_eq!(v.windows(2).collect::<~[&[int]]>(), box [&[1,2], &[2,3], &[3,4]]);
- assert_eq!(v.windows(3).collect::<~[&[int]]>(), box [&[1i,2,3], &[2,3,4]]);
+ assert_eq!(v.windows(2).collect::<Vec<&[int]>>().as_slice(), &[&[1,2], &[2,3], &[3,4]]);
+ assert_eq!(v.windows(3).collect::<Vec<&[int]>>().as_slice(), &[&[1i,2,3], &[2,3,4]]);
assert!(v.windows(6).next().is_none());
}
fn test_chunksator() {
let v = &[1i,2,3,4,5];
- assert_eq!(v.chunks(2).collect::<~[&[int]]>(), box [&[1i,2], &[3,4], &[5]]);
- assert_eq!(v.chunks(3).collect::<~[&[int]]>(), box [&[1i,2,3], &[4,5]]);
- assert_eq!(v.chunks(6).collect::<~[&[int]]>(), box [&[1i,2,3,4,5]]);
+ assert_eq!(v.chunks(2).collect::<Vec<&[int]>>().as_slice(), &[&[1i,2], &[3,4], &[5]]);
+ assert_eq!(v.chunks(3).collect::<Vec<&[int]>>().as_slice(), &[&[1i,2,3], &[4,5]]);
+ assert_eq!(v.chunks(6).collect::<Vec<&[int]>>().as_slice(), &[&[1i,2,3,4,5]]);
- assert_eq!(v.chunks(2).rev().collect::<~[&[int]]>(), box [&[5i], &[3,4], &[1,2]]);
+ assert_eq!(v.chunks(2).rev().collect::<Vec<&[int]>>().as_slice(), &[&[5i], &[3,4], &[1,2]]);
let mut it = v.chunks(2);
assert_eq!(it.indexable(), 3);
assert_eq!(it.idx(0).unwrap(), &[1,2]);
})
}
- #[bench]
- fn add(b: &mut Bencher) {
- let xs: &[int] = [5, ..10];
- let ys: &[int] = [5, ..10];
- b.iter(|| {
- xs + ys;
- });
- }
-
#[bench]
fn concat(b: &mut Bencher) {
let xss: Vec<Vec<uint>> = Vec::from_fn(100, |i| range(0, i).collect());
use io::Writer;
use iter::{Iterator, range, AdditiveIterator};
use option::{None, Option, Some};
-use ptr;
use from_str::FromStr;
-use slice::{OwnedVector, ImmutableVector, MutableVector};
-use slice::{Vector};
+use slice::{ImmutableVector, MutableVector, CloneableVector};
+use slice::Vector;
use vec::Vec;
use default::Default;
use strbuf::StrBuf;
/// Unsafe operations
pub mod raw {
use cast;
- use iter::Iterator;
use libc;
use ptr::RawPtr;
- use ptr;
- use slice::{MutableVector, OwnedVector, Vector};
- use str::{is_utf8};
- use vec::Vec;
+ use raw::Slice;
+ use slice::CloneableVector;
+ use str::{is_utf8, StrAllocating};
pub use core::str::raw::{from_utf8, c_str_to_static_slice, slice_bytes};
pub use core::str::raw::{slice_unchecked};
/// Create a Rust string from a *u8 buffer of the given length
pub unsafe fn from_buf_len(buf: *u8, len: uint) -> ~str {
- let mut v = Vec::with_capacity(len);
- ptr::copy_memory(v.as_mut_ptr(), buf, len);
- v.set_len(len);
-
- assert!(is_utf8(v.as_slice()));
- ::cast::transmute(v.move_iter().collect::<~[u8]>())
+ let v = Slice { data: buf, len: len };
+ let bytes: &[u8] = ::cast::transmute(v);
+ assert!(is_utf8(bytes));
+ let s: &str = ::cast::transmute(bytes);
+ s.to_owned()
}
#[lang="strdup_uniq"]
/// Copy a slice into a new owned str.
#[inline]
fn to_owned(&self) -> ~str {
- let me = self.as_slice();
- let len = me.len();
- unsafe {
- let mut v = Vec::with_capacity(len);
+ use slice::Vector;
- ptr::copy_memory(v.as_mut_ptr(), me.as_ptr(), len);
- v.set_len(len);
- ::cast::transmute(v.move_iter().collect::<~[u8]>())
+ unsafe {
+ ::cast::transmute(self.as_slice().as_bytes().to_owned())
}
}
/// Converts to a vector of `u16` encoded as UTF-16.
- fn to_utf16(&self) -> ~[u16] {
+ fn to_utf16(&self) -> Vec<u16> {
let me = self.as_slice();
- let mut u = Vec::new();;
+ let mut u = Vec::new();
for ch in me.chars() {
let mut buf = [0u16, ..2];
let n = ch.encode_utf16(buf /* as mut slice! */);
u.push_all(buf.slice_to(n));
}
- u.move_iter().collect()
+ u
}
/// Given a string, make a new string with repeated copies of it.
assert_eq!(a.subslice_offset(c), 0);
let string = "a\nb\nc";
- let lines: ~[&str] = string.lines().collect();
+ let lines: Vec<&str> = string.lines().collect();
+ let lines = lines.as_slice();
assert_eq!(string.subslice_offset(lines[0]), 0);
assert_eq!(string.subslice_offset(lines[1]), 2);
assert_eq!(string.subslice_offset(lines[2]), 4);
fn test_utf16() {
let pairs =
[("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n".to_owned(),
- box [0xd800_u16, 0xdf45_u16, 0xd800_u16, 0xdf3f_u16,
+ vec![0xd800_u16, 0xdf45_u16, 0xd800_u16, 0xdf3f_u16,
0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf46_u16,
0xd800_u16, 0xdf39_u16, 0xd800_u16, 0xdf3b_u16,
0xd800_u16, 0xdf30_u16, 0x000a_u16]),
("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n".to_owned(),
- box [0xd801_u16, 0xdc12_u16, 0xd801_u16,
+ vec![0xd801_u16, 0xdc12_u16, 0xd801_u16,
0xdc49_u16, 0xd801_u16, 0xdc2e_u16, 0xd801_u16,
0xdc40_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16,
0xdc4b_u16, 0x0020_u16, 0xd801_u16, 0xdc0f_u16,
0x000a_u16]),
("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n".to_owned(),
- box [0xd800_u16, 0xdf00_u16, 0xd800_u16, 0xdf16_u16,
+ vec![0xd800_u16, 0xdf00_u16, 0xd800_u16, 0xdf16_u16,
0xd800_u16, 0xdf0b_u16, 0xd800_u16, 0xdf04_u16,
0xd800_u16, 0xdf11_u16, 0xd800_u16, 0xdf09_u16,
0x00b7_u16, 0xd800_u16, 0xdf0c_u16, 0xd800_u16,
0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]),
("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n".to_owned(),
- box [0xd801_u16, 0xdc8b_u16, 0xd801_u16, 0xdc98_u16,
+ vec![0xd801_u16, 0xdc8b_u16, 0xd801_u16, 0xdc98_u16,
0xd801_u16, 0xdc88_u16, 0xd801_u16, 0xdc91_u16,
0xd801_u16, 0xdc9b_u16, 0xd801_u16, 0xdc92_u16,
0x0020_u16, 0xd801_u16, 0xdc95_u16, 0xd801_u16,
0x000a_u16 ]),
// Issue #12318, even-numbered non-BMP planes
("\U00020000".to_owned(),
- box [0xD840, 0xDC00])];
+ vec![0xD840, 0xDC00])];
for p in pairs.iter() {
let (s, u) = (*p).clone();
- assert!(is_utf16(u));
+ assert!(is_utf16(u.as_slice()));
assert_eq!(s.to_utf16(), u);
- assert_eq!(from_utf16(u).unwrap(), s);
- assert_eq!(from_utf16_lossy(u), s);
+ assert_eq!(from_utf16(u.as_slice()).unwrap(), s);
+ assert_eq!(from_utf16_lossy(u.as_slice()), s);
- assert_eq!(from_utf16(s.to_utf16()).unwrap(), s);
- assert_eq!(from_utf16(u).unwrap().to_utf16(), u);
+ assert_eq!(from_utf16(s.to_utf16().as_slice()).unwrap(), s);
+ assert_eq!(from_utf16(u.as_slice()).unwrap().to_utf16(), u);
}
}
fn test_split_char_iterator() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
- let split: ~[&str] = data.split(' ').collect();
- assert_eq!( split, box ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+ let split: Vec<&str> = data.split(' ').collect();
+ assert_eq!( split, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
- let mut rsplit: ~[&str] = data.split(' ').rev().collect();
+ let mut rsplit: Vec<&str> = data.split(' ').rev().collect();
rsplit.reverse();
- assert_eq!(rsplit, box ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+ assert_eq!(rsplit, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
- let split: ~[&str] = data.split(|c: char| c == ' ').collect();
- assert_eq!( split, box ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+ let split: Vec<&str> = data.split(|c: char| c == ' ').collect();
+ assert_eq!( split, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
- let mut rsplit: ~[&str] = data.split(|c: char| c == ' ').rev().collect();
+ let mut rsplit: Vec<&str> = data.split(|c: char| c == ' ').rev().collect();
rsplit.reverse();
- assert_eq!(rsplit, box ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+ assert_eq!(rsplit, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
// Unicode
- let split: ~[&str] = data.split('ä').collect();
- assert_eq!( split, box ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+ let split: Vec<&str> = data.split('ä').collect();
+ assert_eq!( split, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
- let mut rsplit: ~[&str] = data.split('ä').rev().collect();
+ let mut rsplit: Vec<&str> = data.split('ä').rev().collect();
rsplit.reverse();
- assert_eq!(rsplit, box ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+ assert_eq!(rsplit, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
- let split: ~[&str] = data.split(|c: char| c == 'ä').collect();
- assert_eq!( split, box ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+ let split: Vec<&str> = data.split(|c: char| c == 'ä').collect();
+ assert_eq!( split, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
- let mut rsplit: ~[&str] = data.split(|c: char| c == 'ä').rev().collect();
+ let mut rsplit: Vec<&str> = data.split(|c: char| c == 'ä').rev().collect();
rsplit.reverse();
- assert_eq!(rsplit, box ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+ assert_eq!(rsplit, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
}
#[test]
fn test_splitn_char_iterator() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
- let split: ~[&str] = data.splitn(' ', 3).collect();
- assert_eq!(split, box ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]);
+ let split: Vec<&str> = data.splitn(' ', 3).collect();
+ assert_eq!(split, vec!["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]);
- let split: ~[&str] = data.splitn(|c: char| c == ' ', 3).collect();
- assert_eq!(split, box ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]);
+ let split: Vec<&str> = data.splitn(|c: char| c == ' ', 3).collect();
+ assert_eq!(split, vec!["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]);
// Unicode
- let split: ~[&str] = data.splitn('ä', 3).collect();
- assert_eq!(split, box ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]);
+ let split: Vec<&str> = data.splitn('ä', 3).collect();
+ assert_eq!(split, vec!["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]);
- let split: ~[&str] = data.splitn(|c: char| c == 'ä', 3).collect();
- assert_eq!(split, box ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]);
+ let split: Vec<&str> = data.splitn(|c: char| c == 'ä', 3).collect();
+ assert_eq!(split, vec!["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]);
}
#[test]
fn test_rsplitn_char_iterator() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
- let mut split: ~[&str] = data.rsplitn(' ', 3).collect();
+ let mut split: Vec<&str> = data.rsplitn(' ', 3).collect();
split.reverse();
- assert_eq!(split, box ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
+ assert_eq!(split, vec!["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
- let mut split: ~[&str] = data.rsplitn(|c: char| c == ' ', 3).collect();
+ let mut split: Vec<&str> = data.rsplitn(|c: char| c == ' ', 3).collect();
split.reverse();
- assert_eq!(split, box ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
+ assert_eq!(split, vec!["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
// Unicode
- let mut split: ~[&str] = data.rsplitn('ä', 3).collect();
+ let mut split: Vec<&str> = data.rsplitn('ä', 3).collect();
split.reverse();
- assert_eq!(split, box ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
+ assert_eq!(split, vec!["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
- let mut split: ~[&str] = data.rsplitn(|c: char| c == 'ä', 3).collect();
+ let mut split: Vec<&str> = data.rsplitn(|c: char| c == 'ä', 3).collect();
split.reverse();
- assert_eq!(split, box ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
+ assert_eq!(split, vec!["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
}
#[test]
fn test_split_char_iterator_no_trailing() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
- let split: ~[&str] = data.split('\n').collect();
- assert_eq!(split, box ["", "Märy häd ä little lämb", "Little lämb", ""]);
+ let split: Vec<&str> = data.split('\n').collect();
+ assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb", ""]);
- let split: ~[&str] = data.split_terminator('\n').collect();
- assert_eq!(split, box ["", "Märy häd ä little lämb", "Little lämb"]);
+ let split: Vec<&str> = data.split_terminator('\n').collect();
+ assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb"]);
}
#[test]
fn test_rev_split_char_iterator_no_trailing() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
- let mut split: ~[&str] = data.split('\n').rev().collect();
+ let mut split: Vec<&str> = data.split('\n').rev().collect();
split.reverse();
- assert_eq!(split, box ["", "Märy häd ä little lämb", "Little lämb", ""]);
+ assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb", ""]);
- let mut split: ~[&str] = data.split_terminator('\n').rev().collect();
+ let mut split: Vec<&str> = data.split_terminator('\n').rev().collect();
split.reverse();
- assert_eq!(split, box ["", "Märy häd ä little lämb", "Little lämb"]);
+ assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb"]);
}
#[test]
fn test_words() {
let data = "\n \tMäry häd\tä little lämb\nLittle lämb\n";
- let words: ~[&str] = data.words().collect();
- assert_eq!(words, box ["Märy", "häd", "ä", "little", "lämb", "Little", "lämb"])
+ let words: Vec<&str> = data.words().collect();
+ assert_eq!(words, vec!["Märy", "häd", "ä", "little", "lämb", "Little", "lämb"])
}
#[test]
#[test]
fn test_lines() {
let data = "\nMäry häd ä little lämb\n\nLittle lämb\n";
- let lines: ~[&str] = data.lines().collect();
- assert_eq!(lines, box ["", "Märy häd ä little lämb", "", "Little lämb"]);
+ let lines: Vec<&str> = data.lines().collect();
+ assert_eq!(lines, vec!["", "Märy häd ä little lämb", "", "Little lämb"]);
let data = "\nMäry häd ä little lämb\n\nLittle lämb"; // no trailing \n
- let lines: ~[&str] = data.lines().collect();
- assert_eq!(lines, box ["", "Märy häd ä little lämb", "", "Little lämb"]);
+ let lines: Vec<&str> = data.lines().collect();
+ assert_eq!(lines, vec!["", "Märy häd ä little lämb", "", "Little lämb"]);
}
#[test]
fn test_split_strator() {
- fn t<'a>(s: &str, sep: &'a str, u: ~[&str]) {
- let v: ~[&str] = s.split_str(sep).collect();
- assert_eq!(v, u);
+ fn t(s: &str, sep: &str, u: &[&str]) {
+ let v: Vec<&str> = s.split_str(sep).collect();
+ assert_eq!(v.as_slice(), u.as_slice());
}
- t("--1233345--", "12345", box ["--1233345--"]);
- t("abc::hello::there", "::", box ["abc", "hello", "there"]);
- t("::hello::there", "::", box ["", "hello", "there"]);
- t("hello::there::", "::", box ["hello", "there", ""]);
- t("::hello::there::", "::", box ["", "hello", "there", ""]);
- t("ประเทศไทย中华Việt Nam", "中华", box ["ประเทศไทย", "Việt Nam"]);
- t("zzXXXzzYYYzz", "zz", box ["", "XXX", "YYY", ""]);
- t("zzXXXzYYYz", "XXX", box ["zz", "zYYYz"]);
- t(".XXX.YYY.", ".", box ["", "XXX", "YYY", ""]);
- t("", ".", box [""]);
- t("zz", "zz", box ["",""]);
- t("ok", "z", box ["ok"]);
- t("zzz", "zz", box ["","z"]);
- t("zzzzz", "zz", box ["","","z"]);
+ t("--1233345--", "12345", ["--1233345--"]);
+ t("abc::hello::there", "::", ["abc", "hello", "there"]);
+ t("::hello::there", "::", ["", "hello", "there"]);
+ t("hello::there::", "::", ["hello", "there", ""]);
+ t("::hello::there::", "::", ["", "hello", "there", ""]);
+ t("ประเทศไทย中华Việt Nam", "中华", ["ประเทศไทย", "Việt Nam"]);
+ t("zzXXXzzYYYzz", "zz", ["", "XXX", "YYY", ""]);
+ t("zzXXXzYYYz", "XXX", ["zz", "zYYYz"]);
+ t(".XXX.YYY.", ".", ["", "XXX", "YYY", ""]);
+ t("", ".", [""]);
+ t("zz", "zz", ["",""]);
+ t("ok", "z", ["ok"]);
+ t("zzz", "zz", ["","z"]);
+ t("zzzzz", "zz", ["","","z"]);
}
#[test]
use iter::{Extendable, FromIterator, Iterator, range};
use option::{None, Option, Some};
use ptr::RawPtr;
-use slice::{OwnedVector, Vector};
+use slice::{OwnedVector, Vector, CloneableVector};
use str::{OwnedStr, Str, StrSlice, StrAllocating};
use str;
use vec::Vec;
impl StrAllocating for StrBuf {
#[inline]
fn into_owned(self) -> ~str {
- let StrBuf {
- vec: vec
- } = self;
unsafe {
- cast::transmute::<~[u8],~str>(vec.move_iter().collect())
+ cast::transmute(self.vec.as_slice().to_owned())
}
}
/// As new(), but returns a vector of as many pre-cloned handles as
/// requested.
- pub fn newN(data: T, num_handles: uint) -> ~[UnsafeArc<T>] {
+ pub fn newN(data: T, num_handles: uint) -> Vec<UnsafeArc<T>> {
unsafe {
if num_handles == 0 {
- box [] // need to free data here
+ vec![] // need to free data here
} else {
let ptr = new_inner(data, num_handles);
let v = Vec::from_fn(num_handles, |_| UnsafeArc { data: ptr });
- v.move_iter().collect()
+ v
}
}
}
use rand::Rng;
use sync::atomics::{AtomicBool, INIT_ATOMIC_BOOL, SeqCst,
AtomicUint, INIT_ATOMIC_UINT};
- use slice;
+ use vec;
#[test]
fn smoke() {
let mut pool = BufferPool::<(int, uint)>::new();
let (mut w, s) = pool.deque();
- let (threads, hits) = slice::unzip(range(0, NTHREADS).map(|_| {
+ let (threads, hits) = vec::unzip(range(0, NTHREADS).map(|_| {
let s = s.clone();
let unique_box = box AtomicUint::new(0);
let thread_box = unsafe {
use c_str::ToCStr;
use cast;
+use iter::Iterator;
use ops::*;
use option::*;
use os;
use path::GenericPath;
use path;
use result::*;
+use slice::{Vector,OwnedVector};
use str;
+use vec::Vec;
pub struct DynamicLibrary { handle: *u8}
("LD_LIBRARY_PATH", ':' as u8)
};
let newenv = os::getenv_as_bytes(envvar).unwrap_or(box []);
- let newenv = newenv + &[sep] + path.as_vec();
- os::setenv(envvar, str::from_utf8(newenv).unwrap());
+ let mut newenv = newenv.move_iter().collect::<Vec<_>>();
+ newenv.push_all(&[sep]);
+ newenv.push_all(path.as_vec());
+ os::setenv(envvar, str::from_utf8(newenv.as_slice()).unwrap());
}
/// Access the value at the symbol of the dynamic library
use mem;
use num;
use num::{CheckedMul, CheckedAdd};
-use ops::Drop;
+use ops::{Add, Drop};
use option::{None, Option, Some, Expect};
use ptr::RawPtr;
use ptr;
use rt::global_heap::{malloc_raw, realloc_raw};
use raw::Slice;
+use RawVec = raw::Vec;
use slice::{ImmutableEqVector, ImmutableVector, Items, MutItems, MutableVector};
use slice::{MutableTotalOrdVector, OwnedVector, Vector};
use slice::{MutableVectorAllocating};
}
}
+impl<T: Clone, V: Vector<T>> Add<V, Vec<T>> for Vec<T> {
+ #[inline]
+ fn add(&self, rhs: &V) -> Vec<T> {
+ let mut res = Vec::with_capacity(self.len() + rhs.as_slice().len());
+ res.push_all(self.as_slice());
+ res.push_all(rhs.as_slice());
+ res
+ }
+}
+
#[unsafe_destructor]
impl<T> Drop for Vec<T> {
fn drop(&mut self) {
}
}
+/**
+ * Convert an iterator of pairs into a pair of vectors.
+ *
+ * Returns a tuple containing two vectors where the i-th element of the first
+ * vector contains the first element of the i-th tuple of the input iterator,
+ * and the i-th element of the second vector contains the second element
+ * of the i-th tuple of the input iterator.
+ */
+pub fn unzip<T, U, V: Iterator<(T, U)>>(mut iter: V) -> (Vec<T>, Vec<U>) {
+ let (lo, _) = iter.size_hint();
+ let mut ts = Vec::with_capacity(lo);
+ let mut us = Vec::with_capacity(lo);
+ for (t, u) in iter {
+ ts.push(t);
+ us.push(u);
+ }
+ (ts, us)
+}
+
+/// Mechanism to convert from a `Vec<T>` to a `[T]`.
+///
+/// In a post-DST world this will be used to convert to any `Ptr<[T]>`.
+///
+/// This could be implemented on more types than just pointers to vectors, but
+/// the recommended approach for those types is to implement `FromIterator`.
+// FIXME(#12938): Update doc comment when DST lands
+pub trait FromVec<T> {
+ /// Convert a `Vec<T>` into the receiver type.
+ fn from_vec(v: Vec<T>) -> Self;
+}
+
+impl<T> FromVec<T> for ~[T] {
+ fn from_vec(mut v: Vec<T>) -> ~[T] {
+ let len = v.len();
+ let data_size = len.checked_mul(&mem::size_of::<T>());
+ let data_size = data_size.expect("overflow in from_vec()");
+ let size = mem::size_of::<RawVec<()>>().checked_add(&data_size);
+ let size = size.expect("overflow in from_vec()");
+
+ // In a post-DST world, we can attempt to reuse the Vec allocation by calling
+ // shrink_to_fit() on it. That may involve a reallocation+memcpy, but that's no
+ // different than what we're doing manually here.
+
+ let vp = v.as_mut_ptr();
+
+ unsafe {
+ let ret = malloc_raw(size) as *mut RawVec<()>;
+
+ (*ret).fill = len * mem::nonzero_size_of::<T>();
+ (*ret).alloc = len * mem::nonzero_size_of::<T>();
+
+ ptr::copy_nonoverlapping_memory(&mut (*ret).data as *mut _ as *mut u8,
+ vp as *u8, data_size);
+
+ // we've transferred ownership of the contents from v, but we can't drop it
+ // as it still needs to free its own allocation.
+ v.set_len(0);
+
+ transmute(ret)
+ }
+ }
+}
+
+/// Unsafe operations
+pub mod raw {
+ use super::Vec;
+ use ptr;
+
+ /// Constructs a vector from an unsafe pointer to a buffer.
+ ///
+ /// The elements of the buffer are copied into the vector without cloning,
+ /// as if `ptr::read()` were called on them.
+ #[inline]
+ pub unsafe fn from_buf<T>(ptr: *T, elts: uint) -> Vec<T> {
+ let mut dst = Vec::with_capacity(elts);
+ dst.set_len(elts);
+ ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), ptr, elts);
+ dst
+ }
+}
+
+
#[cfg(test)]
mod tests {
use prelude::*;
use mem::size_of;
+ use kinds::marker;
+ use super::{unzip, raw, FromVec};
#[test]
fn test_small_vec_struct() {
unsafe { v.set_len(0); }
assert_eq!(v.mut_iter().len(), 0);
}
+
+ #[test]
+ fn test_partition() {
+ assert_eq!(vec![].partition(|x: &int| *x < 3), (vec![], vec![]));
+ assert_eq!(vec![1, 2, 3].partition(|x: &int| *x < 4), (vec![1, 2, 3], vec![]));
+ assert_eq!(vec![1, 2, 3].partition(|x: &int| *x < 2), (vec![1], vec![2, 3]));
+ assert_eq!(vec![1, 2, 3].partition(|x: &int| *x < 0), (vec![], vec![1, 2, 3]));
+ }
+
+ #[test]
+ fn test_partitioned() {
+ assert_eq!(vec![].partitioned(|x: &int| *x < 3), (vec![], vec![]));
+ assert_eq!(vec![1, 2, 3].partitioned(|x: &int| *x < 4), (vec![1, 2, 3], vec![]));
+ assert_eq!(vec![1, 2, 3].partitioned(|x: &int| *x < 2), (vec![1], vec![2, 3]));
+ assert_eq!(vec![1, 2, 3].partitioned(|x: &int| *x < 0), (vec![], vec![1, 2, 3]));
+ }
+
+ #[test]
+ fn test_zip_unzip() {
+ let z1 = vec![(1, 4), (2, 5), (3, 6)];
+
+ let (left, right) = unzip(z1.iter().map(|&x| x));
+
+ let (left, right) = (left.as_slice(), right.as_slice());
+ assert_eq!((1, 4), (left[0], right[0]));
+ assert_eq!((2, 5), (left[1], right[1]));
+ assert_eq!((3, 6), (left[2], right[2]));
+ }
+
+ #[test]
+ fn test_unsafe_ptrs() {
+ unsafe {
+ // Test on-stack copy-from-buf.
+ let a = [1, 2, 3];
+ let ptr = a.as_ptr();
+ let b = raw::from_buf(ptr, 3u);
+ assert_eq!(b, vec![1, 2, 3]);
+
+ // Test on-heap copy-from-buf.
+ let c = box [1, 2, 3, 4, 5];
+ let ptr = c.as_ptr();
+ let d = raw::from_buf(ptr, 5u);
+ assert_eq!(d, vec![1, 2, 3, 4, 5]);
+ }
+ }
+
+ #[test]
+ fn test_from_vec() {
+ let a = vec![1u, 2, 3];
+ let b: ~[uint] = FromVec::from_vec(a);
+ assert_eq!(b.as_slice(), &[1u, 2, 3]);
+
+ let a = vec![];
+ let b: ~[u8] = FromVec::from_vec(a);
+ assert_eq!(b.as_slice(), &[]);
+
+ let a = vec!["one".to_owned(), "two".to_owned()];
+ let b: ~[~str] = FromVec::from_vec(a);
+ assert_eq!(b.as_slice(), &["one".to_owned(), "two".to_owned()]);
+
+ struct Foo {
+ x: uint,
+ nocopy: marker::NoCopy
+ }
+
+ let a = vec![Foo{x: 42, nocopy: marker::NoCopy}, Foo{x: 84, nocopy: marker::NoCopy}];
+ let b: ~[Foo] = FromVec::from_vec(a);
+ assert_eq!(b.len(), 2);
+ assert_eq!(b[0].x, 42);
+ assert_eq!(b[1].x, 84);
+ }
}
/// The type of the iterator used by with_path.
pub type PathElems<'a, 'b> = iter::Chain<Values<'a, PathElem>, LinkedPath<'b>>;
-pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> ~str {
+pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> StrBuf {
let itr = token::get_ident_interner();
path.fold(StrBuf::new(), |mut s, e| {
}
s.push_str(e.as_slice());
s
- }).into_owned()
+ }).to_strbuf()
}
#[deriving(Clone)]
self.with_path_next(id, None, f)
}
- pub fn path_to_str(&self, id: NodeId) -> ~str {
+ pub fn path_to_str(&self, id: NodeId) -> StrBuf {
self.with_path(id, |path| path_to_str(path))
}
- fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> ~str {
+ fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> StrBuf {
self.with_path(id, |path| {
path_to_str(path.chain(Some(PathName(i.name)).move_iter()))
})
}
}
- pub fn node_to_str(&self, id: NodeId) -> ~str {
+ pub fn node_to_str(&self, id: NodeId) -> StrBuf {
node_id_to_str(self, id)
}
}
ii
}
-fn node_id_to_str(map: &Map, id: NodeId) -> ~str {
+fn node_id_to_str(map: &Map, id: NodeId) -> StrBuf {
match map.find(id) {
Some(NodeItem(item)) => {
let path_str = map.path_to_str_with_ident(id, item.ident);
ItemImpl(..) => "impl",
ItemMac(..) => "macro"
};
- format!("{} {} (id={})", item_str, path_str, id)
+ (format!("{} {} (id={})", item_str, path_str, id)).to_strbuf()
}
Some(NodeForeignItem(item)) => {
let path_str = map.path_to_str_with_ident(id, item.ident);
- format!("foreign item {} (id={})", path_str, id)
+ (format!("foreign item {} (id={})", path_str, id)).to_strbuf()
}
Some(NodeMethod(m)) => {
- format!("method {} in {} (id={})",
+ (format!("method {} in {} (id={})",
token::get_ident(m.ident),
- map.path_to_str(id), id)
+ map.path_to_str(id), id)).to_strbuf()
}
Some(NodeTraitMethod(ref tm)) => {
let m = ast_util::trait_method_to_ty_method(&**tm);
- format!("method {} in {} (id={})",
+ (format!("method {} in {} (id={})",
token::get_ident(m.ident),
- map.path_to_str(id), id)
+ map.path_to_str(id), id)).to_strbuf()
}
Some(NodeVariant(ref variant)) => {
- format!("variant {} in {} (id={})",
+ (format!("variant {} in {} (id={})",
token::get_ident(variant.node.name),
- map.path_to_str(id), id)
+ map.path_to_str(id), id)).to_strbuf()
}
Some(NodeExpr(expr)) => {
- format!("expr {} (id={})", pprust::expr_to_str(expr), id)
+ (format!("expr {} (id={})",
+ pprust::expr_to_str(expr), id)).to_strbuf()
}
Some(NodeStmt(stmt)) => {
- format!("stmt {} (id={})", pprust::stmt_to_str(stmt), id)
+ (format!("stmt {} (id={})",
+ pprust::stmt_to_str(stmt), id)).to_strbuf()
}
Some(NodeArg(pat)) => {
- format!("arg {} (id={})", pprust::pat_to_str(pat), id)
+ (format!("arg {} (id={})",
+ pprust::pat_to_str(pat), id)).to_strbuf()
}
Some(NodeLocal(pat)) => {
- format!("local {} (id={})", pprust::pat_to_str(pat), id)
+ (format!("local {} (id={})",
+ pprust::pat_to_str(pat), id)).to_strbuf()
}
Some(NodeBlock(block)) => {
- format!("block {} (id={})", pprust::block_to_str(block), id)
+ (format!("block {} (id={})",
+ pprust::block_to_str(block), id)).to_strbuf()
}
Some(NodeStructCtor(_)) => {
- format!("struct_ctor {} (id={})", map.path_to_str(id), id)
+ (format!("struct_ctor {} (id={})",
+ map.path_to_str(id), id)).to_strbuf()
}
Some(NodeLifetime(ref l)) => {
- format!("lifetime {} (id={})", pprust::lifetime_to_str(*l), id)
+ (format!("lifetime {} (id={})",
+ pprust::lifetime_to_str(*l), id)).to_strbuf()
}
None => {
- format!("unknown node (id={})", id)
+ (format!("unknown node (id={})", id)).to_strbuf()
}
}
}
use std::strbuf::StrBuf;
use std::u32;
-pub fn path_name_i(idents: &[Ident]) -> ~str {
+pub fn path_name_i(idents: &[Ident]) -> StrBuf {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
idents.iter().map(|i| {
- token::get_ident(*i).get().to_str()
- }).collect::<Vec<~str>>().connect("::")
+ token::get_ident(*i).get().to_strbuf()
+ }).collect::<Vec<StrBuf>>().connect("::").to_strbuf()
}
// totally scary function: ignores all but the last element, should have
// Get a string representation of a signed int type, with its value.
// We want to avoid "45int" and "-3int" in favor of "45" and "-3"
-pub fn int_ty_to_str(t: IntTy, val: Option<i64>) -> ~str {
+pub fn int_ty_to_str(t: IntTy, val: Option<i64>) -> StrBuf {
let s = match t {
TyI if val.is_some() => "",
TyI => "int",
};
match val {
- Some(n) => format!("{}{}", n, s),
- None => s.to_owned()
+ Some(n) => format!("{}{}", n, s).to_strbuf(),
+ None => s.to_strbuf()
}
}
// Get a string representation of an unsigned int type, with its value.
// We want to avoid "42uint" in favor of "42u"
-pub fn uint_ty_to_str(t: UintTy, val: Option<u64>) -> ~str {
+pub fn uint_ty_to_str(t: UintTy, val: Option<u64>) -> StrBuf {
let s = match t {
TyU if val.is_some() => "u",
TyU => "uint",
};
match val {
- Some(n) => format!("{}{}", n, s),
- None => s.to_owned()
+ Some(n) => format!("{}{}", n, s).to_strbuf(),
+ None => s.to_strbuf()
}
}
}
}
-pub fn float_ty_to_str(t: FloatTy) -> ~str {
- match t { TyF32 => "f32".to_owned(), TyF64 => "f64".to_owned(), TyF128 => "f128".to_owned() }
+pub fn float_ty_to_str(t: FloatTy) -> StrBuf {
+ match t {
+ TyF32 => "f32".to_strbuf(),
+ TyF64 => "f64".to_strbuf(),
+ TyF128 => "f128".to_strbuf(),
+ }
}
pub fn is_call_expr(e: @Expr) -> bool {
/// listed as `__extensions__::method_name::hash`, with no indication
/// of the type).
pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> Ident {
- let mut pretty = StrBuf::from_owned_str(pprust::ty_to_str(ty));
+ let mut pretty = pprust::ty_to_str(ty);
match *trait_ref {
Some(ref trait_ref) => {
pretty.push_char('.');
- pretty.push_str(pprust::path_to_str(&trait_ref.path));
+ pretty.push_str(pprust::path_to_str(&trait_ref.path).as_slice());
}
None => {}
}
let meta = mk_name_value_item_str(
InternedString::new("doc"),
token::intern_and_get_ident(strip_doc_comment_decoration(
- comment.get())));
+ comment.get()).as_slice()));
mk_attr(meta)
} else {
*self
pub struct NameAndSpan {
/// The name of the macro that was invoked to create the thing
/// with this Span.
- pub name: ~str,
+ pub name: StrBuf,
/// The format with which the macro was invoked.
pub format: MacroFormat,
/// The span of the macro definition itself. The macro may not
pub callee: NameAndSpan
}
-pub type FileName = ~str;
+pub type FileName = StrBuf;
pub struct FileLines {
pub file: Rc<FileMap>,
/// e.g. `<anon>`
pub name: FileName,
/// The complete source code
- pub src: ~str,
+ pub src: StrBuf,
/// The start position of this source in the CodeMap
pub start_pos: BytePos,
/// Locations of lines beginnings in the source code
}
// get a line from the list of pre-computed line-beginnings
- pub fn get_line(&self, line: int) -> ~str {
+ pub fn get_line(&self, line: int) -> StrBuf {
let mut lines = self.lines.borrow_mut();
let begin: BytePos = *lines.get(line as uint) - self.start_pos;
let begin = begin.to_uint();
- let slice = self.src.slice_from(begin);
+ let slice = self.src.as_slice().slice_from(begin);
match slice.find('\n') {
- Some(e) => slice.slice_to(e).to_owned(),
- None => slice.to_owned()
+ Some(e) => slice.slice_to(e).to_strbuf(),
+ None => slice.to_strbuf()
}
}
}
pub fn is_real_file(&self) -> bool {
- !(self.name.starts_with("<") && self.name.ends_with(">"))
+ !(self.name.as_slice().starts_with("<") &&
+ self.name.as_slice().ends_with(">"))
}
}
}
}
- pub fn new_filemap(&self, filename: FileName, src: ~str) -> Rc<FileMap> {
+ pub fn new_filemap(&self, filename: FileName, src: StrBuf) -> Rc<FileMap> {
let mut files = self.files.borrow_mut();
let start_pos = match files.last() {
None => 0,
// Remove utf-8 BOM if any.
// FIXME #12884: no efficient/safe way to remove from the start of a string
// and reuse the allocation.
- let mut src = if src.starts_with("\ufeff") {
+ let mut src = if src.as_slice().starts_with("\ufeff") {
StrBuf::from_str(src.as_slice().slice_from(3))
} else {
- StrBuf::from_owned_str(src)
+ StrBuf::from_str(src.as_slice())
};
// Append '\n' in case it's not already there.
let filemap = Rc::new(FileMap {
name: filename,
- src: src.into_owned(),
+ src: src.to_strbuf(),
start_pos: Pos::from_uint(start_pos),
lines: RefCell::new(Vec::new()),
multibyte_chars: RefCell::new(Vec::new()),
filemap
}
- pub fn mk_substr_filename(&self, sp: Span) -> ~str {
+ pub fn mk_substr_filename(&self, sp: Span) -> StrBuf {
let pos = self.lookup_char_pos(sp.lo);
- format!("<{}:{}:{}>", pos.file.name, pos.line, pos.col.to_uint() + 1)
+ (format!("<{}:{}:{}>",
+ pos.file.name,
+ pos.line,
+ pos.col.to_uint() + 1)).to_strbuf()
}
/// Lookup source information about a BytePos
pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
let loc = self.lookup_char_pos(pos);
LocWithOpt {
- filename: loc.file.name.to_str(),
+ filename: loc.file.name.to_strbuf(),
line: loc.line,
col: loc.col,
file: Some(loc.file)
}
}
- pub fn span_to_str(&self, sp: Span) -> ~str {
+ pub fn span_to_str(&self, sp: Span) -> StrBuf {
if self.files.borrow().len() == 0 && sp == DUMMY_SP {
- return "no-location".to_owned();
+ return "no-location".to_strbuf();
}
let lo = self.lookup_char_pos_adj(sp.lo);
let hi = self.lookup_char_pos_adj(sp.hi);
- return format!("{}:{}:{}: {}:{}", lo.filename,
- lo.line, lo.col.to_uint() + 1, hi.line, hi.col.to_uint() + 1)
+ return (format!("{}:{}:{}: {}:{}",
+ lo.filename,
+ lo.line,
+ lo.col.to_uint() + 1,
+ hi.line,
+ hi.col.to_uint() + 1)).to_strbuf()
}
pub fn span_to_filename(&self, sp: Span) -> FileName {
- self.lookup_char_pos(sp.lo).file.name.to_str()
+ self.lookup_char_pos(sp.lo).file.name.to_strbuf()
}
pub fn span_to_lines(&self, sp: Span) -> FileLines {
FileLines {file: lo.file, lines: lines}
}
- pub fn span_to_snippet(&self, sp: Span) -> Option<~str> {
+ pub fn span_to_snippet(&self, sp: Span) -> Option<StrBuf> {
let begin = self.lookup_byte_offset(sp.lo);
let end = self.lookup_byte_offset(sp.hi);
if begin.fm.start_pos != end.fm.start_pos {
None
} else {
- Some(begin.fm.src.slice( begin.pos.to_uint(), end.pos.to_uint()).to_owned())
+ Some(begin.fm.src.as_slice().slice(begin.pos.to_uint(),
+ end.pos.to_uint()).to_strbuf())
}
}
pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
for fm in self.files.borrow().iter() {
- if filename == fm.name {
+ if filename == fm.name.as_slice() {
return fm.clone();
}
}
#[test]
fn t1 () {
let cm = CodeMap::new();
- let fm = cm.new_filemap("blork.rs".to_owned(),"first line.\nsecond line".to_owned());
+ let fm = cm.new_filemap("blork.rs".to_strbuf(),
+ "first line.\nsecond line".to_strbuf());
fm.next_line(BytePos(0));
- assert_eq!(&fm.get_line(0),&"first line.".to_owned());
+ assert_eq!(&fm.get_line(0),&"first line.".to_strbuf());
// TESTING BROKEN BEHAVIOR:
fm.next_line(BytePos(10));
- assert_eq!(&fm.get_line(1),&".".to_owned());
+ assert_eq!(&fm.get_line(1), &".".to_strbuf());
}
#[test]
#[should_fail]
fn t2 () {
let cm = CodeMap::new();
- let fm = cm.new_filemap("blork.rs".to_owned(),"first line.\nsecond line".to_owned());
+ let fm = cm.new_filemap("blork.rs".to_strbuf(),
+ "first line.\nsecond line".to_strbuf());
// TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0));
fm.next_line(BytePos(10));
fn init_code_map() -> CodeMap {
let cm = CodeMap::new();
- let fm1 = cm.new_filemap("blork.rs".to_owned(),"first line.\nsecond line".to_owned());
- let fm2 = cm.new_filemap("empty.rs".to_owned(),"".to_owned());
- let fm3 = cm.new_filemap("blork2.rs".to_owned(),"first line.\nsecond line".to_owned());
+ let fm1 = cm.new_filemap("blork.rs".to_strbuf(),
+ "first line.\nsecond line".to_strbuf());
+ let fm2 = cm.new_filemap("empty.rs".to_strbuf(),
+ "".to_strbuf());
+ let fm3 = cm.new_filemap("blork2.rs".to_strbuf(),
+ "first line.\nsecond line".to_strbuf());
fm1.next_line(BytePos(0));
fm1.next_line(BytePos(12));
let cm = init_code_map();
let fmabp1 = cm.lookup_byte_offset(BytePos(22));
- assert_eq!(fmabp1.fm.name, "blork.rs".to_owned());
+ assert_eq!(fmabp1.fm.name, "blork.rs".to_strbuf());
assert_eq!(fmabp1.pos, BytePos(22));
let fmabp2 = cm.lookup_byte_offset(BytePos(24));
- assert_eq!(fmabp2.fm.name, "blork2.rs".to_owned());
+ assert_eq!(fmabp2.fm.name, "blork2.rs".to_strbuf());
assert_eq!(fmabp2.pos, BytePos(0));
}
let cm = init_code_map();
let loc1 = cm.lookup_char_pos(BytePos(22));
- assert_eq!(loc1.file.name, "blork.rs".to_owned());
+ assert_eq!(loc1.file.name, "blork.rs".to_strbuf());
assert_eq!(loc1.line, 2);
assert_eq!(loc1.col, CharPos(10));
let loc2 = cm.lookup_char_pos(BytePos(24));
- assert_eq!(loc2.file.name, "blork2.rs".to_owned());
+ assert_eq!(loc2.file.name, "blork2.rs".to_strbuf());
assert_eq!(loc2.line, 1);
assert_eq!(loc2.col, CharPos(0));
}
fn init_code_map_mbc() -> CodeMap {
let cm = CodeMap::new();
// € is a three byte utf8 char.
- let fm1 = cm.new_filemap("blork.rs".to_owned(),"fir€st €€€€ line.\nsecond line".to_owned());
- let fm2 = cm.new_filemap("blork2.rs".to_owned(),"first line€€.\n€ second line".to_owned());
+ let fm1 =
+ cm.new_filemap("blork.rs".to_strbuf(),
+ "fir€st €€€€ line.\nsecond line".to_strbuf());
+ let fm2 = cm.new_filemap("blork2.rs".to_strbuf(),
+ "first line€€.\n€ second line".to_strbuf());
fm1.next_line(BytePos(0));
fm1.next_line(BytePos(22));
let span = Span {lo: BytePos(12), hi: BytePos(23), expn_info: None};
let file_lines = cm.span_to_lines(span);
- assert_eq!(file_lines.file.name, "blork.rs".to_owned());
+ assert_eq!(file_lines.file.name, "blork.rs".to_strbuf());
assert_eq!(file_lines.lines.len(), 1);
assert_eq!(*file_lines.lines.get(0), 1u);
}
let span = Span {lo: BytePos(12), hi: BytePos(23), expn_info: None};
let snippet = cm.span_to_snippet(span);
- assert_eq!(snippet, Some("second line".to_owned()));
+ assert_eq!(snippet, Some("second line".to_strbuf()));
}
#[test]
let span = Span {lo: BytePos(12), hi: BytePos(23), expn_info: None};
let sstr = cm.span_to_str(span);
- assert_eq!(sstr, "blork.rs:2:1: 2:12".to_owned());
+ assert_eq!(sstr, "blork.rs:2:1: 2:12".to_strbuf());
}
}
pub struct CrateId {
/// A path which represents the codes origin. By convention this is the
/// URL, without `http://` or `https://` prefix, to the crate's repository
- pub path: ~str,
+ pub path: StrBuf,
/// The name of the crate.
- pub name: ~str,
+ pub name: StrBuf,
/// The version of the crate.
- pub version: Option<~str>,
+ pub version: Option<StrBuf>,
}
impl fmt::Show for CrateId {
None => "0.0",
Some(ref version) => version.as_slice(),
};
- if self.path == self.name || self.path.ends_with(format!("/{}", self.name)) {
+ if self.path == self.name ||
+ self.path.as_slice().ends_with(format!("/{}", self.name)) {
write!(f.buf, "\\#{}", version)
} else {
write!(f.buf, "\\#{}:{}", self.name, version)
let inferred_name = *path_pieces.get(0);
let (name, version) = if pieces.len() == 1 {
- (inferred_name.to_owned(), None)
+ (inferred_name.to_strbuf(), None)
} else {
let hash_pieces: Vec<&str> = pieces.get(1)
.splitn(':', 1)
};
let name = if !hash_name.is_empty() {
- hash_name.to_owned()
+ hash_name.to_strbuf()
} else {
- inferred_name.to_owned()
+ inferred_name.to_strbuf()
};
let version = if !hash_version.is_empty() {
if hash_version == "0.0" {
None
} else {
- Some(hash_version.to_owned())
+ Some(hash_version.to_strbuf())
}
} else {
None
};
Some(CrateId {
- path: path.clone(),
+ path: path.to_strbuf(),
name: name,
version: version,
})
}
}
- pub fn short_name_with_version(&self) -> ~str {
- format!("{}-{}", self.name, self.version_or_default())
+ pub fn short_name_with_version(&self) -> StrBuf {
+ (format!("{}-{}", self.name, self.version_or_default())).to_strbuf()
}
pub fn matches(&self, other: &CrateId) -> bool {
#[test]
fn bare_name() {
let crateid: CrateId = from_str("foo").expect("valid crateid");
- assert_eq!(crateid.name, "foo".to_owned());
+ assert_eq!(crateid.name, "foo".to_strbuf());
assert_eq!(crateid.version, None);
- assert_eq!(crateid.path, "foo".to_owned());
+ assert_eq!(crateid.path, "foo".to_strbuf());
}
#[test]
fn bare_name_single_char() {
let crateid: CrateId = from_str("f").expect("valid crateid");
- assert_eq!(crateid.name, "f".to_owned());
+ assert_eq!(crateid.name, "f".to_strbuf());
assert_eq!(crateid.version, None);
- assert_eq!(crateid.path, "f".to_owned());
+ assert_eq!(crateid.path, "f".to_strbuf());
}
#[test]
#[test]
fn simple_path() {
let crateid: CrateId = from_str("example.com/foo/bar").expect("valid crateid");
- assert_eq!(crateid.name, "bar".to_owned());
+ assert_eq!(crateid.name, "bar".to_strbuf());
assert_eq!(crateid.version, None);
- assert_eq!(crateid.path, "example.com/foo/bar".to_owned());
+ assert_eq!(crateid.path, "example.com/foo/bar".to_strbuf());
}
#[test]
fn simple_version() {
let crateid: CrateId = from_str("foo#1.0").expect("valid crateid");
- assert_eq!(crateid.name, "foo".to_owned());
- assert_eq!(crateid.version, Some("1.0".to_owned()));
- assert_eq!(crateid.path, "foo".to_owned());
+ assert_eq!(crateid.name, "foo".to_strbuf());
+ assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+ assert_eq!(crateid.path, "foo".to_strbuf());
}
#[test]
#[test]
fn path_and_version() {
let crateid: CrateId = from_str("example.com/foo/bar#1.0").expect("valid crateid");
- assert_eq!(crateid.name, "bar".to_owned());
- assert_eq!(crateid.version, Some("1.0".to_owned()));
- assert_eq!(crateid.path, "example.com/foo/bar".to_owned());
+ assert_eq!(crateid.name, "bar".to_strbuf());
+ assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+ assert_eq!(crateid.path, "example.com/foo/bar".to_strbuf());
}
#[test]
fn single_chars() {
let crateid: CrateId = from_str("a/b#1").expect("valid crateid");
- assert_eq!(crateid.name, "b".to_owned());
- assert_eq!(crateid.version, Some("1".to_owned()));
- assert_eq!(crateid.path, "a/b".to_owned());
+ assert_eq!(crateid.name, "b".to_strbuf());
+ assert_eq!(crateid.version, Some("1".to_strbuf()));
+ assert_eq!(crateid.path, "a/b".to_strbuf());
}
#[test]
fn missing_version() {
let crateid: CrateId = from_str("foo#").expect("valid crateid");
- assert_eq!(crateid.name, "foo".to_owned());
+ assert_eq!(crateid.name, "foo".to_strbuf());
assert_eq!(crateid.version, None);
- assert_eq!(crateid.path, "foo".to_owned());
+ assert_eq!(crateid.path, "foo".to_strbuf());
}
#[test]
fn path_and_name() {
let crateid: CrateId = from_str("foo/rust-bar#bar:1.0").expect("valid crateid");
- assert_eq!(crateid.name, "bar".to_owned());
- assert_eq!(crateid.version, Some("1.0".to_owned()));
- assert_eq!(crateid.path, "foo/rust-bar".to_owned());
+ assert_eq!(crateid.name, "bar".to_strbuf());
+ assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+ assert_eq!(crateid.path, "foo/rust-bar".to_strbuf());
}
#[test]
fn empty_name() {
let crateid: CrateId = from_str("foo/bar#:1.0").expect("valid crateid");
- assert_eq!(crateid.name, "bar".to_owned());
- assert_eq!(crateid.version, Some("1.0".to_owned()));
- assert_eq!(crateid.path, "foo/bar".to_owned());
+ assert_eq!(crateid.name, "bar".to_strbuf());
+ assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+ assert_eq!(crateid.path, "foo/bar".to_strbuf());
}
// the span)
let span_end = Span { lo: sp.hi, hi: sp.hi, expn_info: sp.expn_info};
let ses = cm.span_to_str(span_end);
- try!(print_diagnostic(dst, ses, lvl, msg));
+ try!(print_diagnostic(dst, ses.as_slice(), lvl, msg));
if rsp.is_full_span() {
try!(custom_highlight_lines(dst, cm, sp, lvl, lines));
}
} else {
- try!(print_diagnostic(dst, ss, lvl, msg));
+ try!(print_diagnostic(dst, ss.as_slice(), lvl, msg));
if rsp.is_full_span() {
try!(highlight_lines(dst, cm, sp, lvl, lines));
}
}
let orig = fm.get_line(*lines.lines.get(0) as int);
for pos in range(0u, left-skip) {
- let cur_char = orig[pos] as char;
+ let cur_char = orig.as_slice()[pos] as char;
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct
sp: Span)
-> io::IoResult<()> {
for ei in sp.expn_info.iter() {
- let ss = ei.callee.span.as_ref().map_or("".to_owned(), |span| cm.span_to_str(*span));
+ let ss = ei.callee
+ .span
+ .as_ref()
+ .map_or("".to_strbuf(), |span| cm.span_to_str(*span));
let (pre, post) = match ei.callee.format {
codemap::MacroAttribute => ("#[", "]"),
codemap::MacroBang => ("", "!")
};
- try!(print_diagnostic(w, ss, Note,
+ try!(print_diagnostic(w, ss.as_slice(), Note,
format!("in expansion of {}{}{}", pre,
ei.callee.name, post)));
let ss = cm.span_to_str(ei.call_site);
- try!(print_diagnostic(w, ss, Note, "expansion site"));
+ try!(print_diagnostic(w, ss.as_slice(), Note, "expansion site"));
try!(print_macro_backtrace(w, cm, ei.call_site));
}
Ok(())
}
-pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> ~str) -> T {
+pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> StrBuf)
+ -> T {
match opt {
Some(ref t) => (*t).clone(),
- None => diag.handler().bug(msg()),
+ None => diag.handler().bug(msg().as_slice()),
}
}
// ast::MacInvocTT.
pub struct MacroDef {
- pub name: ~str,
+ pub name: StrBuf,
pub ext: SyntaxExtension
}
pub struct MacroCrate {
pub lib: Option<Path>,
- pub macros: Vec<~str>,
- pub registrar_symbol: Option<~str>,
+ pub macros: Vec<StrBuf>,
+ pub registrar_symbol: Option<StrBuf>,
}
pub trait CrateLoader {
pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); }
pub fn mod_path(&self) -> Vec<ast::Ident> {
let mut v = Vec::new();
- v.push(token::str_to_ident(self.ecfg.crate_id.name));
+ v.push(token::str_to_ident(self.ecfg.crate_id.name.as_slice()));
v.extend(self.mod_path.iter().map(|a| *a));
return v;
}
sp: Span,
tts: &[ast::TokenTree],
name: &str)
- -> Option<~str> {
+ -> Option<StrBuf> {
if tts.len() != 1 {
cx.span_err(sp, format!("{} takes 1 argument.", name));
} else {
match tts[0] {
ast::TTTok(_, token::LIT_STR(ident))
| ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
- return Some(token::get_ident(ident).get().to_str())
+ return Some(token::get_ident(ident).get().to_strbuf())
}
_ => cx.span_err(sp, format!("{} requires a string.", name)),
}
vec!(
self.expr_str(span, msg),
self.expr_str(span,
- token::intern_and_get_ident(loc.file.name)),
+ token::intern_and_get_ident(loc.file
+ .name
+ .as_slice())),
self.expr_uint(span, loc.line)))
}
to_set.expn_info = Some(@codemap::ExpnInfo {
call_site: to_set,
callee: codemap::NameAndSpan {
- name: format!("deriving({})", trait_name),
+ name: format!("deriving({})", trait_name).to_strbuf(),
format: codemap::MacroAttribute,
span: Some(self.span)
}
Some(v) => v
};
- let e = match os::getenv(var) {
+ let e = match os::getenv(var.as_slice()) {
None => {
cx.expr_path(cx.path_all(sp,
true,
fld.cx.bt_push(ExpnInfo {
call_site: e.span,
callee: NameAndSpan {
- name: extnamestr.get().to_str(),
+ name: extnamestr.get().to_strbuf(),
format: MacroBang,
span: exp_span,
},
fld.cx.bt_push(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
- name: mname.get().to_str(),
+ name: mname.get().to_strbuf(),
format: MacroAttribute,
span: None
}
fld.cx.bt_push(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
- name: mname.get().to_str(),
+ name: mname.get().to_strbuf(),
format: MacroAttribute,
span: None,
}
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
- name: extnamestr.get().to_str(),
+ name: extnamestr.get().to_strbuf(),
format: MacroBang,
span: span
}
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
- name: extnamestr.get().to_str(),
+ name: extnamestr.get().to_strbuf(),
format: MacroBang,
span: span
}
Some(MacroDef { name, ext }) => {
// yikes... no idea how to apply the mark to this. I'm afraid
// we're going to have to wait-and-see on this one.
- fld.extsbox.insert(intern(name), ext);
+ fld.extsbox.insert(intern(name.as_slice()), ext);
if attr::contains_name(it.attrs.as_slice(), "macro_export") {
SmallVector::one(it)
} else {
_ => unreachable!()
};
let name = format!("<{} macros>", token::get_ident(crate_name));
+ let name = name.to_strbuf();
for source in macros.iter() {
let item = parse::parse_item_from_source_str(name.clone(),
};
unsafe {
- let registrar: MacroCrateRegistrationFun = match lib.symbol(registrar) {
- Ok(registrar) => registrar,
- // again fatal if we can't register macros
- Err(err) => fld.cx.span_fatal(krate.span, err)
- };
+ let registrar: MacroCrateRegistrationFun =
+ match lib.symbol(registrar.as_slice()) {
+ Ok(registrar) => registrar,
+ // again fatal if we can't register macros
+ Err(err) => fld.cx.span_fatal(krate.span, err)
+ };
registrar(|name, extension| {
let extension = match extension {
NormalTT(ext, _) => NormalTT(ext, Some(krate.span)),
fld.cx.bt_push(ExpnInfo {
call_site: s.span,
callee: NameAndSpan {
- name: extnamestr.get().to_str(),
+ name: extnamestr.get().to_strbuf(),
format: MacroBang,
span: exp_span,
}
#[should_fail]
#[test] fn macros_cant_escape_fns_test () {
let src = "fn bogus() {macro_rules! z (() => (3+4))}\
- fn inty() -> int { z!() }".to_owned();
+ fn inty() -> int { z!() }".to_strbuf();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
- "<test>".to_owned(),
+ "<test>".to_strbuf(),
src,
Vec::new(), &sess);
// should fail:
#[should_fail]
#[test] fn macros_cant_escape_mods_test () {
let src = "mod foo {macro_rules! z (() => (3+4))}\
- fn inty() -> int { z!() }".to_owned();
+ fn inty() -> int { z!() }".to_strbuf();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
- "<test>".to_owned(),
+ "<test>".to_strbuf(),
src,
Vec::new(), &sess);
// should fail:
// macro_escape modules shouldn't cause macros to leave scope
#[test] fn macros_can_escape_flattened_mods_test () {
let src = "#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
- fn inty() -> int { z!() }".to_owned();
+ fn inty() -> int { z!() }".to_strbuf();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
- "<test>".to_owned(),
+ "<test>".to_strbuf(),
src,
Vec::new(), &sess);
// should fail:
}
}
- fn expand_crate_str(crate_str: ~str) -> ast::Crate {
+ fn expand_crate_str(crate_str: StrBuf) -> ast::Crate {
let ps = parse::new_parse_sess();
let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod();
// the cfg argument actually does matter, here...
// println!("expanded: {:?}\n",expanded_ast);
//mtwt_resolve_crate(expanded_ast)
//}
- //fn expand_and_resolve_and_pretty_print (crate_str: @str) -> ~str {
+ //fn expand_and_resolve_and_pretty_print (crate_str: @str) -> StrBuf {
//let resolved_ast = expand_and_resolve(crate_str);
//pprust::to_str(&resolved_ast,fake_print_crate,get_ident_interner())
//}
#[test] fn macro_tokens_should_match(){
- expand_crate_str("macro_rules! m((a)=>(13)) fn main(){m!(a);}".to_owned());
+ expand_crate_str(
+ "macro_rules! m((a)=>(13)) fn main(){m!(a);}".to_strbuf());
}
// renaming tests expand a crate and then check that the bindings match
let (teststr, bound_connections, bound_ident_check) = match *t {
(ref str,ref conns, bic) => (str.to_owned(), conns.clone(), bic)
};
- let cr = expand_crate_str(teststr.to_owned());
+ let cr = expand_crate_str(teststr.to_strbuf());
// find the bindings:
let mut name_finder = new_name_finder(Vec::new());
visit::walk_crate(&mut name_finder,&cr,());
let crate_str = "macro_rules! fmt_wrap(($b:expr)=>($b.to_str()))
macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}}))
foo_module!()
-".to_owned();
+".to_strbuf();
let cr = expand_crate_str(crate_str);
// find the xx binding
let mut name_finder = new_name_finder(Vec::new());
#[test]
fn pat_idents(){
- let pat = string_to_pat("(a,Foo{x:c @ (b,9),y:Bar(4,d)})".to_owned());
+ let pat = string_to_pat(
+ "(a,Foo{x:c @ (b,9),y:Bar(4,d)})".to_strbuf());
let mut pat_idents = new_name_finder(Vec::new());
pat_idents.visit_pat(pat, ());
assert_eq!(pat_idents.ident_accumulator,
use parse::token;
use rsparse = parse;
-use std::fmt::parse;
+use parse = fmt_macros;
use collections::{HashMap, HashSet};
#[deriving(Eq)]
enum ArgumentType {
- Known(~str),
+ Known(StrBuf),
Unsigned,
String,
}
enum Position {
Exact(uint),
- Named(~str),
+ Named(StrBuf),
}
struct Context<'a, 'b> {
// Note that we keep a side-array of the ordering of the named arguments
// found to be sure that we can translate them in the same order that they
// were declared in.
- names: HashMap<~str, @ast::Expr>,
- name_types: HashMap<~str, ArgumentType>,
- name_ordering: Vec<~str>,
+ names: HashMap<StrBuf, @ast::Expr>,
+ name_types: HashMap<StrBuf, ArgumentType>,
+ name_ordering: Vec<StrBuf>,
// Collection of the compiled `rt::Piece` structures
pieces: Vec<@ast::Expr> ,
- name_positions: HashMap<~str, uint>,
+ name_positions: HashMap<StrBuf, uint>,
method_statics: Vec<@ast::Item> ,
// Updated as arguments are consumed or methods are entered
/// Some((fmtstr, unnamed arguments, ordering of named arguments,
/// named arguments))
fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
- -> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<~str>,
- HashMap<~str, @ast::Expr>)>) {
+ -> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<StrBuf>,
+ HashMap<StrBuf, @ast::Expr>)>) {
let mut args = Vec::new();
- let mut names = HashMap::<~str, @ast::Expr>::new();
+ let mut names = HashMap::<StrBuf, @ast::Expr>::new();
let mut order = Vec::new();
let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(),
continue
}
}
- order.push(name.to_str());
- names.insert(name.to_str(), e);
+ order.push(name.to_strbuf());
+ names.insert(name.to_strbuf(), e);
} else {
args.push(p.parse_expr());
}
Exact(i)
}
parse::ArgumentIs(i) => Exact(i),
- parse::ArgumentNamed(s) => Named(s.to_str()),
+ parse::ArgumentNamed(s) => Named(s.to_strbuf()),
};
// and finally the method being applied
match arg.method {
None => {
- let ty = Known(arg.format.ty.to_str());
+ let ty = Known(arg.format.ty.to_strbuf());
self.verify_arg_type(pos, ty);
}
Some(ref method) => { self.verify_method(pos, *method); }
self.verify_arg_type(Exact(i), Unsigned);
}
parse::CountIsName(s) => {
- self.verify_arg_type(Named(s.to_str()), Unsigned);
+ self.verify_arg_type(Named(s.to_strbuf()), Unsigned);
}
parse::CountIsNextParam => {
if self.check_positional_ok() {
parse::Keyword(name) => {
self.ecx.span_err(self.fmtsp,
format!("duplicate selector \
- `{:?}`", name));
+ `{}`", name));
}
parse::Literal(idx) => {
self.ecx.span_err(self.fmtsp,
return vec!(unnamed, allow_dead_code);
}
- fn parsepath(&self, s: &str) -> Vec<ast::Ident> {
- vec!(self.ecx.ident_of("std"), self.ecx.ident_of("fmt"),
- self.ecx.ident_of("parse"), self.ecx.ident_of(s))
- }
-
fn rtpath(&self, s: &str) -> Vec<ast::Ident> {
vec!(self.ecx.ident_of("std"), self.ecx.ident_of("fmt"),
self.ecx.ident_of("rt"), self.ecx.ident_of(s))
}
- fn ctpath(&self, s: &str) -> Vec<ast::Ident> {
- vec!(self.ecx.ident_of("std"), self.ecx.ident_of("fmt"),
- self.ecx.ident_of("parse"), self.ecx.ident_of(s))
- }
-
fn none(&self) -> @ast::Expr {
let none = self.ecx.path_global(self.fmtsp, vec!(
self.ecx.ident_of("std"),
}).collect();
let (lr, selarg) = match arm.selector {
parse::Keyword(t) => {
- let p = self.ctpath(format!("{:?}", t));
+ let p = self.rtpath(t.to_str());
let p = self.ecx.path_global(sp, p);
(self.rtpath("Keyword"), self.ecx.expr_path(p))
}
let fill = self.ecx.expr_lit(sp, ast::LitChar(fill));
let align = match arg.format.align {
parse::AlignLeft => {
- self.ecx.path_global(sp, self.parsepath("AlignLeft"))
+ self.ecx.path_global(sp, self.rtpath("AlignLeft"))
}
parse::AlignRight => {
- self.ecx.path_global(sp, self.parsepath("AlignRight"))
+ self.ecx.path_global(sp, self.rtpath("AlignRight"))
}
parse::AlignUnknown => {
- self.ecx.path_global(sp, self.parsepath("AlignUnknown"))
+ self.ecx.path_global(sp, self.rtpath("AlignUnknown"))
}
};
let align = self.ecx.expr_path(align);
pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
extra: @ast::Expr,
efmt: @ast::Expr, args: Vec<@ast::Expr>,
- name_ordering: Vec<~str>,
- names: HashMap<~str, @ast::Expr>) -> @ast::Expr {
+ name_ordering: Vec<StrBuf>,
+ names: HashMap<StrBuf, @ast::Expr>) -> @ast::Expr {
let arg_types = Vec::from_fn(args.len(), |_| None);
let mut cx = Context {
ecx: ecx,
trait ToSource : ToTokens {
// Takes a thing and generates a string containing rust code for it.
- pub fn to_source() -> ~str;
+ pub fn to_source() -> StrBuf;
// If you can make source, you can definitely make tokens.
pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] {
pub trait ToSource {
// Takes a thing and generates a string containing rust code for it.
- fn to_source(&self) -> ~str;
+ fn to_source(&self) -> StrBuf;
}
impl ToSource for ast::Ident {
- fn to_source(&self) -> ~str {
- get_ident(*self).get().to_str()
+ fn to_source(&self) -> StrBuf {
+ get_ident(*self).get().to_strbuf()
}
}
impl ToSource for @ast::Item {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
pprust::item_to_str(*self)
}
}
impl<'a> ToSource for &'a [@ast::Item] {
- fn to_source(&self) -> ~str {
- self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect("\n\n")
+ fn to_source(&self) -> StrBuf {
+ self.iter()
+ .map(|i| i.to_source())
+ .collect::<Vec<StrBuf>>()
+ .connect("\n\n")
+ .to_strbuf()
}
}
impl ToSource for ast::Ty {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
pprust::ty_to_str(self)
}
}
impl<'a> ToSource for &'a [ast::Ty] {
- fn to_source(&self) -> ~str {
- self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect(", ")
+ fn to_source(&self) -> StrBuf {
+ self.iter()
+ .map(|i| i.to_source())
+ .collect::<Vec<StrBuf>>()
+ .connect(", ")
+ .to_strbuf()
}
}
impl ToSource for Generics {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
pprust::generics_to_str(self)
}
}
impl ToSource for @ast::Expr {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
pprust::expr_to_str(*self)
}
}
impl ToSource for ast::Block {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
pprust::block_to_str(self)
}
}
impl<'a> ToSource for &'a str {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitStr(
token::intern_and_get_ident(*self), ast::CookedStr));
pprust::lit_to_str(&lit)
}
impl ToSource for () {
- fn to_source(&self) -> ~str {
- "()".to_owned()
+ fn to_source(&self) -> StrBuf {
+ "()".to_strbuf()
}
}
impl ToSource for bool {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitBool(*self));
pprust::lit_to_str(&lit)
}
}
impl ToSource for char {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitChar(*self));
pprust::lit_to_str(&lit)
}
}
impl ToSource for int {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI));
pprust::lit_to_str(&lit)
}
}
impl ToSource for i8 {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI8));
pprust::lit_to_str(&lit)
}
}
impl ToSource for i16 {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI16));
pprust::lit_to_str(&lit)
}
impl ToSource for i32 {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI32));
pprust::lit_to_str(&lit)
}
}
impl ToSource for i64 {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI64));
pprust::lit_to_str(&lit)
}
}
impl ToSource for uint {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU));
pprust::lit_to_str(&lit)
}
}
impl ToSource for u8 {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU8));
pprust::lit_to_str(&lit)
}
}
impl ToSource for u16 {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU16));
pprust::lit_to_str(&lit)
}
}
impl ToSource for u32 {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU32));
pprust::lit_to_str(&lit)
}
}
impl ToSource for u64 {
- fn to_source(&self) -> ~str {
+ fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU64));
pprust::lit_to_str(&lit)
}
impl_to_tokens!(u64)
pub trait ExtParseUtils {
- fn parse_item(&self, s: ~str) -> @ast::Item;
- fn parse_expr(&self, s: ~str) -> @ast::Expr;
- fn parse_stmt(&self, s: ~str) -> @ast::Stmt;
- fn parse_tts(&self, s: ~str) -> Vec<ast::TokenTree> ;
+ fn parse_item(&self, s: StrBuf) -> @ast::Item;
+ fn parse_expr(&self, s: StrBuf) -> @ast::Expr;
+ fn parse_stmt(&self, s: StrBuf) -> @ast::Stmt;
+ fn parse_tts(&self, s: StrBuf) -> Vec<ast::TokenTree> ;
}
impl<'a> ExtParseUtils for ExtCtxt<'a> {
- fn parse_item(&self, s: ~str) -> @ast::Item {
+ fn parse_item(&self, s: StrBuf) -> @ast::Item {
let res = parse::parse_item_from_source_str(
- "<quote expansion>".to_str(),
+ "<quote expansion>".to_strbuf(),
s,
self.cfg(),
self.parse_sess());
}
}
- fn parse_stmt(&self, s: ~str) -> @ast::Stmt {
- parse::parse_stmt_from_source_str("<quote expansion>".to_str(),
+ fn parse_stmt(&self, s: StrBuf) -> @ast::Stmt {
+ parse::parse_stmt_from_source_str("<quote expansion>".to_strbuf(),
s,
self.cfg(),
Vec::new(),
self.parse_sess())
}
- fn parse_expr(&self, s: ~str) -> @ast::Expr {
- parse::parse_expr_from_source_str("<quote expansion>".to_str(),
+ fn parse_expr(&self, s: StrBuf) -> @ast::Expr {
+ parse::parse_expr_from_source_str("<quote expansion>".to_strbuf(),
s,
self.cfg(),
self.parse_sess())
}
- fn parse_tts(&self, s: ~str) -> Vec<ast::TokenTree> {
- parse::parse_tts_from_source_str("<quote expansion>".to_str(),
+ fn parse_tts(&self, s: StrBuf) -> Vec<ast::TokenTree> {
+ parse::parse_tts_from_source_str("<quote expansion>".to_strbuf(),
s,
self.cfg(),
self.parse_sess())
base::MacExpr::new(expanded)
}
-fn ids_ext(strs: Vec<~str> ) -> Vec<ast::Ident> {
- strs.iter().map(|str| str_to_ident(*str)).collect()
+fn ids_ext(strs: Vec<StrBuf> ) -> Vec<ast::Ident> {
+ strs.iter().map(|str| str_to_ident((*str).as_slice())).collect()
}
fn id_ext(str: &str) -> ast::Ident {
sp: Span,
cx_expr: @ast::Expr,
expr: @ast::Expr) -> @ast::Expr {
- let uses = vec!( cx.view_use_glob(sp, ast::Inherited,
- ids_ext(vec!("syntax".to_owned(),
- "ext".to_owned(),
- "quote".to_owned(),
- "rt".to_owned()))) );
+ let uses = vec![ cx.view_use_glob(sp, ast::Inherited,
+ ids_ext(vec!["syntax".to_strbuf(),
+ "ext".to_strbuf(),
+ "quote".to_strbuf(),
+ "rt".to_strbuf()])) ];
let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr);
let topmost = topmost_expn_info(cx.backtrace().unwrap());
let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);
- let filename = token::intern_and_get_ident(loc.file.name);
+ let filename = token::intern_and_get_ident(loc.file.name.as_slice());
base::MacExpr::new(cx.expr_str(topmost.call_site, filename))
}
pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult> {
let s = pprust::tts_to_str(tts);
- base::MacExpr::new(cx.expr_str(sp, token::intern_and_get_ident(s)))
+ base::MacExpr::new(cx.expr_str(sp,
+ token::intern_and_get_ident(s.as_slice())))
}
pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
base::check_zero_tts(cx, sp, tts, "module_path!");
let string = cx.mod_path()
.iter()
- .map(|x| token::get_ident(*x).get().to_str())
- .collect::<Vec<~str>>()
+ .map(|x| token::get_ident(*x).get().to_strbuf())
+ .collect::<Vec<StrBuf>>()
.connect("::");
base::MacExpr::new(cx.expr_str(sp, token::intern_and_get_ident(string)))
}
Some(src) => {
// Add this input file to the code map to make it available as
// dependency information
- let filename = file.display().to_str();
+ let filename = file.display().to_str().to_strbuf();
let interned = token::intern_and_get_ident(src);
- cx.codemap().new_filemap(filename, src.to_owned());
+ cx.codemap().new_filemap(filename, src.to_strbuf());
base::MacExpr::new(cx.expr_str(sp, interned))
}
..
} => {
// Don't recurse into file using "include!"
- if "include" == *name {
+ if "include" == name.as_slice() {
expn_info
} else {
topmost_expn_info(next_expn_info)
pub enum ParseResult {
Success(HashMap<Ident, Rc<NamedMatch>>),
- Failure(codemap::Span, ~str),
- Error(codemap::Span, ~str)
+ Failure(codemap::Span, StrBuf),
+ Error(codemap::Span, StrBuf)
}
pub fn parse_or_else(sess: &ParseSess,
-> HashMap<Ident, Rc<NamedMatch>> {
match parse(sess, cfg, rdr, ms.as_slice()) {
Success(m) => m,
- Failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str),
- Error(sp, str) => sess.span_diagnostic.span_fatal(sp, str)
+ Failure(sp, str) => {
+ sess.span_diagnostic.span_fatal(sp, str.as_slice())
+ }
+ Error(sp, str) => {
+ sess.span_diagnostic.span_fatal(sp, str.as_slice())
+ }
}
}
}
return Success(nameize(sess, ms, v.as_slice()));
} else if eof_eis.len() > 1u {
- return Error(sp, "ambiguity: multiple successful parses".to_owned());
+ return Error(sp, "ambiguity: multiple successful parses".to_strbuf());
} else {
- return Failure(sp, "unexpected end of macro invocation".to_owned());
+ return Failure(sp, "unexpected end of macro invocation".to_strbuf());
}
} else {
if (bb_eis.len() > 0u && next_eis.len() > 0u)
let nts = bb_eis.iter().map(|ei| {
match ei.elts.get(ei.idx).node {
MatchNonterminal(bind, name, _) => {
- format!("{} ('{}')",
+ (format!("{} ('{}')",
token::get_ident(name),
- token::get_ident(bind))
+ token::get_ident(bind))).to_strbuf()
}
_ => fail!()
- } }).collect::<Vec<~str>>().connect(" or ");
+ } }).collect::<Vec<StrBuf>>().connect(" or ");
return Error(sp, format!(
"local ambiguity: multiple parsing options: \
built-in NTs {} or {} other options.",
- nts, next_eis.len()));
+ nts, next_eis.len()).to_strbuf());
} else if bb_eis.len() == 0u && next_eis.len() == 0u {
return Failure(sp, format!("no rules expected the token `{}`",
- token::to_str(&tok)));
+ token::to_str(&tok)).to_strbuf());
} else if next_eis.len() > 0u {
/* Now process the next token */
while next_eis.len() > 0u {
token::IDENT(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
_ => {
let token_str = token::to_str(&p.token);
- p.fatal("expected ident, found ".to_owned() + token_str)
+ p.fatal((format!("expected ident, found {}",
+ token_str.as_slice())).as_slice())
}
},
"path" => {
// Which arm's failure should we report? (the one furthest along)
let mut best_fail_spot = DUMMY_SP;
- let mut best_fail_msg = "internal error: ran no matchers".to_owned();
+ let mut best_fail_msg = "internal error: ran no matchers".to_strbuf();
for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
match **lhs {
best_fail_spot = sp;
best_fail_msg = (*msg).clone();
},
- Error(sp, ref msg) => cx.span_fatal(sp, (*msg))
+ Error(sp, ref msg) => cx.span_fatal(sp, msg.as_slice())
}
}
_ => cx.bug("non-matcher found in parsed lhses")
}
}
- cx.span_fatal(best_fail_spot, best_fail_msg);
+ cx.span_fatal(best_fail_spot, best_fail_msg.as_slice());
}
// this procedure performs the expansion of the
box MacroRulesDefiner {
def: RefCell::new(Some(MacroDef {
- name: token::get_ident(name).to_str(),
+ name: token::get_ident(name).to_str().to_strbuf(),
ext: NormalTT(exp, Some(sp))
}))
} as Box<MacResult>
enum LockstepIterSize {
LisUnconstrained,
LisConstraint(uint, Ident),
- LisContradiction(~str),
+ LisContradiction(StrBuf),
}
fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
let r_n = token::get_ident(r_id);
LisContradiction(format!("inconsistent lockstep iteration: \
'{}' has {} items, but '{}' has {}",
- l_n, l_len, r_n, r_len))
+ l_n, l_len, r_n, r_len).to_strbuf())
}
}
}
}
LisContradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead
- r.sp_diag.span_fatal(sp.clone(), *msg);
+ r.sp_diag.span_fatal(sp.clone(), msg.as_slice());
}
LisConstraint(len, _) => {
if len == 0 {
let pred_val = $pred;
let a_val = $a;
let b_val = $b;
- if !(pred_val(a_val,b_val)) {
+ if !(pred_val(a_val.as_slice(),b_val.as_slice())) {
fail!("expected args satisfying {}, got {:?} and {:?}",
$predname, a_val, b_val);
}
#[test] fn ident_transformation () {
let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate(
- "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_owned());
+ "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_strbuf());
let folded_crate = zz_fold.fold_crate(ast);
- assert_pred!(matches_codepattern,
- "matches_codepattern",
- pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
- "#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_owned());
+ assert_pred!(
+ matches_codepattern,
+ "matches_codepattern",
+ pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
+ "#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_strbuf());
}
// even inside macro defs....
let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate(
"macro_rules! a {(b $c:expr $(d $e:token)f+ => \
- (g $(d $d $e)+))} ".to_owned());
+ (g $(d $d $e)+))} ".to_strbuf());
let folded_crate = zz_fold.fold_crate(ast);
- assert_pred!(matches_codepattern,
- "matches_codepattern",
- pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
- "zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)))".to_owned());
+ assert_pred!(
+ matches_codepattern,
+ "matches_codepattern",
+ pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
+ "zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)))".to_strbuf());
}
}
extern crate collections;
#[phase(syntax, link)]
extern crate log;
+extern crate fmt_macros;
pub mod util {
pub mod interner;
#[deriving(Clone)]
pub struct Comment {
pub style: CommentStyle,
- pub lines: Vec<~str>,
+ pub lines: Vec<StrBuf>,
pub pos: BytePos,
}
}
}
-pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
+pub fn strip_doc_comment_decoration(comment: &str) -> StrBuf {
/// remove whitespace-only lines from the start/end of lines
- fn vertical_trim(lines: Vec<~str> ) -> Vec<~str> {
+ fn vertical_trim(lines: Vec<StrBuf> ) -> Vec<StrBuf> {
let mut i = 0u;
let mut j = lines.len();
// first line of all-stars should be omitted
- if lines.len() > 0 && lines.get(0).chars().all(|c| c == '*') {
+ if lines.len() > 0 &&
+ lines.get(0).as_slice().chars().all(|c| c == '*') {
i += 1;
}
- while i < j && lines.get(i).trim().is_empty() {
+ while i < j && lines.get(i).as_slice().trim().is_empty() {
i += 1;
}
// like the first, a last line of all stars should be omitted
- if j > i && lines.get(j - 1).chars().skip(1).all(|c| c == '*') {
+ if j > i && lines.get(j - 1)
+ .as_slice()
+ .chars()
+ .skip(1)
+ .all(|c| c == '*') {
j -= 1;
}
- while j > i && lines.get(j - 1).trim().is_empty() {
+ while j > i && lines.get(j - 1).as_slice().trim().is_empty() {
j -= 1;
}
return lines.slice(i, j).iter().map(|x| (*x).clone()).collect();
}
/// remove a "[ \t]*\*" block from each line, if possible
- fn horizontal_trim(lines: Vec<~str> ) -> Vec<~str> {
+ fn horizontal_trim(lines: Vec<StrBuf> ) -> Vec<StrBuf> {
let mut i = uint::MAX;
let mut can_trim = true;
let mut first = true;
for line in lines.iter() {
- for (j, c) in line.chars().enumerate() {
+ for (j, c) in line.as_slice().chars().enumerate() {
if j > i || !"* \t".contains_char(c) {
can_trim = false;
break;
}
if can_trim {
- lines.iter().map(|line| line.slice(i + 1, line.len()).to_owned()).collect()
+ lines.iter().map(|line| {
+ line.as_slice().slice(i + 1, line.len()).to_strbuf()
+ }).collect()
} else {
lines
}
static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"];
for prefix in ONLINERS.iter() {
if comment.starts_with(*prefix) {
- return comment.slice_from(prefix.len()).to_owned();
+ return comment.slice_from(prefix.len()).to_strbuf();
}
}
if comment.starts_with("/*") {
let lines = comment.slice(3u, comment.len() - 2u)
.lines_any()
- .map(|s| s.to_owned())
- .collect::<Vec<~str> >();
+ .map(|s| s.to_strbuf())
+ .collect::<Vec<StrBuf> >();
let lines = vertical_trim(lines);
let lines = horizontal_trim(lines);
- return lines.connect("\n");
+ return lines.connect("\n").to_strbuf();
}
fail!("not a doc-comment: {}", comment);
}
-fn read_to_eol(rdr: &mut StringReader) -> ~str {
+fn read_to_eol(rdr: &mut StringReader) -> StrBuf {
let mut val = StrBuf::new();
while !rdr.curr_is('\n') && !is_eof(rdr) {
val.push_char(rdr.curr.unwrap());
bump(rdr);
}
if rdr.curr_is('\n') { bump(rdr); }
- return val.into_owned();
+ return val
}
-fn read_one_line_comment(rdr: &mut StringReader) -> ~str {
+fn read_one_line_comment(rdr: &mut StringReader) -> StrBuf {
let val = read_to_eol(rdr);
- assert!((val[0] == '/' as u8 && val[1] == '/' as u8) ||
- (val[0] == '#' as u8 && val[1] == '!' as u8));
+ assert!((val.as_slice()[0] == '/' as u8 &&
+ val.as_slice()[1] == '/' as u8) ||
+ (val.as_slice()[0] == '#' as u8 &&
+ val.as_slice()[1] == '!' as u8));
return val;
}
comments: &mut Vec<Comment>) {
debug!(">>> line comments");
let p = rdr.last_pos;
- let mut lines: Vec<~str> = Vec::new();
+ let mut lines: Vec<StrBuf> = Vec::new();
while rdr.curr_is('/') && nextch_is(rdr, '/') {
let line = read_one_line_comment(rdr);
debug!("{}", line);
- if is_doc_comment(line) { // doc-comments are not put in comments
+ // Doc comments are not put in comments.
+ if is_doc_comment(line.as_slice()) {
break;
}
lines.push(line);
return Some(cursor);
}
-fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<~str> ,
- s: ~str, col: CharPos) {
+fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<StrBuf> ,
+ s: StrBuf, col: CharPos) {
let len = s.len();
- let s1 = match all_whitespace(s, col) {
+ let s1 = match all_whitespace(s.as_slice(), col) {
Some(col) => {
if col < len {
- s.slice(col, len).to_owned()
- } else { "".to_owned() }
+ s.as_slice().slice(col, len).to_strbuf()
+ } else {
+ "".to_strbuf()
+ }
}
None => s,
};
comments: &mut Vec<Comment> ) {
debug!(">>> block comment");
let p = rdr.last_pos;
- let mut lines: Vec<~str> = Vec::new();
+ let mut lines: Vec<StrBuf> = Vec::new();
let col = rdr.col;
bump(rdr);
bump(rdr);
return
}
assert!(!curr_line.as_slice().contains_char('\n'));
- lines.push(curr_line.into_owned());
+ lines.push(curr_line);
} else {
let mut level: int = 1;
while level > 0 {
debug!("=== block comment level {}", level);
if is_eof(rdr) {
- rdr.fatal("unterminated block comment".to_owned());
+ rdr.fatal("unterminated block comment".to_strbuf());
}
if rdr.curr_is('\n') {
trim_whitespace_prefix_and_push_line(&mut lines,
- curr_line.into_owned(),
+ curr_line,
col);
curr_line = StrBuf::new();
bump(rdr);
}
if curr_line.len() != 0 {
trim_whitespace_prefix_and_push_line(&mut lines,
- curr_line.into_owned(),
+ curr_line,
col);
}
}
#[deriving(Clone)]
pub struct Literal {
- pub lit: ~str,
+ pub lit: StrBuf,
pub pos: BytePos,
}
// probably not a good thing.
pub fn gather_comments_and_literals(span_diagnostic:
&diagnostic::SpanHandler,
- path: ~str,
+ path: StrBuf,
srdr: &mut io::Reader)
-> (Vec<Comment>, Vec<Literal>) {
let src = srdr.read_to_end().unwrap();
- let src = str::from_utf8(src.as_slice()).unwrap().to_owned();
+ let src = str::from_utf8(src.as_slice()).unwrap().to_strbuf();
let cm = CodeMap::new();
let filemap = cm.new_filemap(path, src);
let mut rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap);
if token::is_lit(&tok) {
with_str_from(&rdr, bstart, |s| {
debug!("tok lit: {}", s);
- literals.push(Literal {lit: s.to_owned(), pos: sp.lo});
+ literals.push(Literal {lit: s.to_strbuf(), pos: sp.lo});
})
} else {
debug!("tok: {}", token::to_str(&tok));
#[test] fn test_block_doc_comment_1() {
let comment = "/**\n * Test \n ** Test\n * Test\n*/";
let stripped = strip_doc_comment_decoration(comment);
- assert_eq!(stripped, " Test \n* Test\n Test".to_owned());
+ assert_eq!(stripped, " Test \n* Test\n Test".to_strbuf());
}
#[test] fn test_block_doc_comment_2() {
let comment = "/**\n * Test\n * Test\n*/";
let stripped = strip_doc_comment_decoration(comment);
- assert_eq!(stripped, " Test\n Test".to_owned());
+ assert_eq!(stripped, " Test\n Test".to_strbuf());
}
#[test] fn test_block_doc_comment_3() {
let comment = "/**\n let a: *int;\n *a = 5;\n*/";
let stripped = strip_doc_comment_decoration(comment);
- assert_eq!(stripped, " let a: *int;\n *a = 5;".to_owned());
+ assert_eq!(stripped, " let a: *int;\n *a = 5;".to_strbuf());
}
#[test] fn test_block_doc_comment_4() {
let comment = "/*******************\n test\n *********************/";
let stripped = strip_doc_comment_decoration(comment);
- assert_eq!(stripped, " test".to_owned());
+ assert_eq!(stripped, " test".to_strbuf());
}
#[test] fn test_line_doc_comment() {
let stripped = strip_doc_comment_decoration("/// test");
- assert_eq!(stripped, " test".to_owned());
+ assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("///! test");
- assert_eq!(stripped, " test".to_owned());
+ assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("// test");
- assert_eq!(stripped, " test".to_owned());
+ assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("// test");
- assert_eq!(stripped, " test".to_owned());
+ assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("///test");
- assert_eq!(stripped, "test".to_owned());
+ assert_eq!(stripped, "test".to_strbuf());
let stripped = strip_doc_comment_decoration("///!test");
- assert_eq!(stripped, "test".to_owned());
+ assert_eq!(stripped, "test".to_strbuf());
let stripped = strip_doc_comment_decoration("//test");
- assert_eq!(stripped, "test".to_owned());
+ assert_eq!(stripped, "test".to_strbuf());
}
}
pub trait Reader {
fn is_eof(&self) -> bool;
fn next_token(&mut self) -> TokenAndSpan;
- fn fatal(&self, ~str) -> !;
+ fn fatal(&self, StrBuf) -> !;
fn span_diag<'a>(&'a self) -> &'a SpanHandler;
fn peek(&self) -> TokenAndSpan;
}
string_advance_token(self);
ret_val
}
- fn fatal(&self, m: ~str) -> ! {
- self.span_diagnostic.span_fatal(self.peek_span, m)
+ fn fatal(&self, m: StrBuf) -> ! {
+ self.span_diagnostic.span_fatal(self.peek_span, m.as_slice())
}
fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.span_diagnostic }
fn peek(&self) -> TokenAndSpan {
debug!("TtReader: r={:?}", r);
r
}
- fn fatal(&self, m: ~str) -> ! {
- self.sp_diag.span_fatal(self.cur_span, m);
+ fn fatal(&self, m: StrBuf) -> ! {
+ self.sp_diag.span_fatal(self.cur_span, m.as_slice());
}
fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.sp_diag }
fn peek(&self) -> TokenAndSpan {
fn fatal_span(rdr: &mut StringReader,
from_pos: BytePos,
to_pos: BytePos,
- m: ~str)
+ m: StrBuf)
-> ! {
rdr.peek_span = codemap::mk_sp(from_pos, to_pos);
rdr.fatal(m);
fn fatal_span_char(rdr: &mut StringReader,
from_pos: BytePos,
to_pos: BytePos,
- m: ~str,
+ m: StrBuf,
c: char)
-> ! {
- let mut m = StrBuf::from_owned_str(m);
+ let mut m = m;
m.push_str(": ");
char::escape_default(c, |c| m.push_char(c));
- fatal_span(rdr, from_pos, to_pos, m.into_owned());
+ fatal_span(rdr, from_pos, to_pos, m.into_strbuf());
}
// report a lexical error spanning [`from_pos`, `to_pos`), appending the
fn fatal_span_verbose(rdr: &mut StringReader,
from_pos: BytePos,
to_pos: BytePos,
- m: ~str)
+ m: StrBuf)
-> ! {
- let mut m = StrBuf::from_owned_str(m);
+ let mut m = m;
m.push_str(": ");
let from = byte_offset(rdr, from_pos).to_uint();
let to = byte_offset(rdr, to_pos).to_uint();
- m.push_str(rdr.filemap.src.slice(from, to));
- fatal_span(rdr, from_pos, to_pos, m.into_owned());
+ m.push_str(rdr.filemap.src.as_slice().slice(from, to));
+ fatal_span(rdr, from_pos, to_pos, m);
}
// EFFECT: advance peek_tok and peek_span to refer to the next token.
end: BytePos,
f: |s: &str| -> T)
-> T {
- f(rdr.filemap.src.slice(
+ f(rdr.filemap.src.as_slice().slice(
byte_offset(rdr, start).to_uint(),
byte_offset(rdr, end).to_uint()))
}
if current_byte_offset < rdr.filemap.src.len() {
assert!(rdr.curr.is_some());
let last_char = rdr.curr.unwrap();
- let next = rdr.filemap.src.char_range_at(current_byte_offset);
+ let next = rdr.filemap
+ .src
+ .as_slice()
+ .char_range_at(current_byte_offset);
let byte_offset_diff = next.next - current_byte_offset;
rdr.pos = rdr.pos + Pos::from_uint(byte_offset_diff);
rdr.curr = Some(next.ch);
pub fn nextch(rdr: &StringReader) -> Option<char> {
let offset = byte_offset(rdr, rdr.pos).to_uint();
if offset < rdr.filemap.src.len() {
- Some(rdr.filemap.src.char_at(offset))
+ Some(rdr.filemap.src.as_slice().char_at(offset))
} else {
None
}
while level > 0 {
if is_eof(rdr) {
let msg = if is_doc_comment {
- "unterminated block doc-comment".to_owned()
+ "unterminated block doc-comment".to_strbuf()
} else {
- "unterminated block comment".to_owned()
+ "unterminated block comment".to_strbuf()
};
fatal_span(rdr, start_bpos, rdr.last_pos, msg);
} else if rdr.curr_is('/') && nextch_is(rdr, '*') {
if res.is_some() { res } else { consume_whitespace_and_comments(rdr) }
}
-fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<~str> {
+fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<StrBuf> {
// \x00 hits the `return None` case immediately, so this is fine.
let mut c = rdr.curr.unwrap_or('\x00');
let mut rslt = StrBuf::new();
}
let exponent = scan_digits(rdr, 10u);
if exponent.len() > 0u {
- rslt.push_str(exponent);
- return Some(rslt.into_owned());
+ rslt.push_str(exponent.as_slice());
+ return Some(rslt);
} else {
fatal_span(rdr, start_bpos, rdr.last_pos,
- "scan_exponent: bad fp literal".to_owned());
+ "scan_exponent: bad fp literal".to_strbuf());
}
- } else { return None::<~str>; }
+ } else {
+ return None::<StrBuf>;
+ }
}
-fn scan_digits(rdr: &mut StringReader, radix: uint) -> ~str {
+fn scan_digits(rdr: &mut StringReader, radix: uint) -> StrBuf {
let mut rslt = StrBuf::new();
loop {
let c = rdr.curr;
rslt.push_char(c.unwrap());
bump(rdr);
}
- _ => return rslt.into_owned()
+ _ => return rslt
}
};
}
fn check_float_base(rdr: &mut StringReader, start_bpos: BytePos, last_bpos: BytePos,
base: uint) {
match base {
- 16u => fatal_span(rdr, start_bpos, last_bpos,
- "hexadecimal float literal is not supported".to_owned()),
+ 16u => {
+ fatal_span(rdr, start_bpos, last_bpos,
+ "hexadecimal float literal is not supported".to_strbuf())
+ }
8u => fatal_span(rdr, start_bpos, last_bpos,
- "octal float literal is not supported".to_owned()),
+ "octal float literal is not supported".to_strbuf()),
2u => fatal_span(rdr, start_bpos, last_bpos,
- "binary float literal is not supported".to_owned()),
+ "binary float literal is not supported".to_strbuf()),
_ => ()
}
}
bump(rdr);
base = 2u;
}
- num_str = StrBuf::from_owned_str(scan_digits(rdr, base));
+ num_str = scan_digits(rdr, base);
c = rdr.curr.unwrap_or('\x00');
nextch(rdr);
if c == 'u' || c == 'i' {
}
if num_str.len() == 0u {
fatal_span(rdr, start_bpos, rdr.last_pos,
- "no valid digits found for number".to_owned());
+ "no valid digits found for number".to_strbuf());
}
let parsed = match from_str_radix::<u64>(num_str.as_slice(),
base as uint) {
Some(p) => p,
None => fatal_span(rdr, start_bpos, rdr.last_pos,
- "int literal is too large".to_owned())
+ "int literal is too large".to_strbuf())
};
match tp {
bump(rdr);
let dec_part = scan_digits(rdr, 10u);
num_str.push_char('.');
- num_str.push_str(dec_part);
+ num_str.push_str(dec_part.as_slice());
}
match scan_exponent(rdr, start_bpos) {
Some(ref s) => {
is_float = true;
- num_str.push_str(*s);
+ num_str.push_str(s.as_slice());
}
None => ()
}
return token::LIT_FLOAT(str_to_ident(num_str.as_slice()), ast::TyF128);
}
fatal_span(rdr, start_bpos, rdr.last_pos,
- "expected `f32`, `f64` or `f128` suffix".to_owned());
+ "expected `f32`, `f64` or `f128` suffix".to_strbuf());
}
if is_float {
check_float_base(rdr, start_bpos, rdr.last_pos, base);
} else {
if num_str.len() == 0u {
fatal_span(rdr, start_bpos, rdr.last_pos,
- "no valid digits found for number".to_owned());
+ "no valid digits found for number".to_strbuf());
}
let parsed = match from_str_radix::<u64>(num_str.as_slice(),
base as uint) {
Some(p) => p,
None => fatal_span(rdr, start_bpos, rdr.last_pos,
- "int literal is too large".to_owned())
+ "int literal is too large".to_strbuf())
};
debug!("lexing {} as an unsuffixed integer literal",
while i != 0u && !is_eof(rdr) {
let n = rdr.curr;
if !is_hex_digit(n) {
- fatal_span_char(rdr, rdr.last_pos, rdr.pos,
- "illegal character in numeric character escape".to_owned(),
- n.unwrap());
+ fatal_span_char(
+ rdr,
+ rdr.last_pos,
+ rdr.pos,
+ "illegal character in numeric character escape".to_strbuf(),
+ n.unwrap());
}
bump(rdr);
accum_int *= 16;
}
if i != 0 && is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos,
- "unterminated numeric character escape".to_owned());
+ "unterminated numeric character escape".to_strbuf());
}
match char::from_u32(accum_int as u32) {
Some(x) => x,
None => fatal_span(rdr, start_bpos, rdr.last_pos,
- "illegal numeric character escape".to_owned())
+ "illegal numeric character escape".to_strbuf())
}
}
if token::is_keyword(token::keywords::Self, tok) {
fatal_span(rdr, start, rdr.last_pos,
"invalid lifetime name: 'self \
- is no longer a special lifetime".to_owned());
+ is no longer a special lifetime".to_strbuf());
} else if token::is_any_keyword(tok) &&
!token::is_keyword(token::keywords::Static, tok) {
fatal_span(rdr, start, rdr.last_pos,
- "invalid lifetime name".to_owned());
+ "invalid lifetime name".to_strbuf());
} else {
return token::LIFETIME(ident);
}
'u' => scan_numeric_escape(rdr, 4u),
'U' => scan_numeric_escape(rdr, 8u),
c2 => {
- fatal_span_char(rdr, escaped_pos, rdr.last_pos,
- "unknown character escape".to_owned(), c2)
+ fatal_span_char(rdr,
+ escaped_pos,
+ rdr.last_pos,
+ "unknown character \
+ escape".to_strbuf(),
+ c2)
}
}
}
}
}
'\t' | '\n' | '\r' | '\'' => {
- fatal_span_char(rdr, start, rdr.last_pos,
- "character constant must be escaped".to_owned(), c2);
+ fatal_span_char(
+ rdr,
+ start,
+ rdr.last_pos,
+ "character constant must be escaped".to_strbuf(),
+ c2);
}
_ => {}
}
// ascii single quote.
start - BytePos(1),
rdr.last_pos,
- "unterminated character constant".to_owned());
+ "unterminated character constant".to_strbuf());
}
bump(rdr); // advance curr past token
return token::LIT_CHAR(c2);
while !rdr.curr_is('"') {
if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos,
- "unterminated double quote string".to_owned());
+ "unterminated double quote string".to_strbuf());
}
let ch = rdr.curr.unwrap();
'\\' => {
if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos,
- "unterminated double quote string".to_owned());
+ "unterminated double quote string".to_strbuf());
}
let escaped = rdr.curr.unwrap();
}
c2 => {
fatal_span_char(rdr, escaped_pos, rdr.last_pos,
- "unknown string escape".to_owned(), c2);
+ "unknown string escape".to_strbuf(), c2);
}
}
}
if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos,
- "unterminated raw string".to_owned());
+ "unterminated raw string".to_strbuf());
} else if !rdr.curr_is('"') {
fatal_span_char(rdr, start_bpos, rdr.last_pos,
"only `#` is allowed in raw string delimitation; \
- found illegal character".to_owned(),
+ found illegal character".to_strbuf(),
rdr.curr.unwrap());
}
bump(rdr);
'outer: loop {
if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos,
- "unterminated raw string".to_owned());
+ "unterminated raw string".to_strbuf());
}
if rdr.curr_is('"') {
content_end_bpos = rdr.last_pos;
'%' => { return binop(rdr, token::PERCENT); }
c => {
fatal_span_char(rdr, rdr.last_pos, rdr.pos,
- "unknown start of token".to_owned(), c);
+ "unknown start of token".to_strbuf(), c);
}
}
}
// open a string reader for the given string
fn setup<'a>(span_handler: &'a diagnostic::SpanHandler,
- teststr: ~str) -> StringReader<'a> {
- let fm = span_handler.cm.new_filemap("zebra.rs".to_owned(), teststr);
+ teststr: StrBuf) -> StringReader<'a> {
+ let fm = span_handler.cm.new_filemap("zebra.rs".to_strbuf(), teststr);
new_string_reader(span_handler, fm)
}
let span_handler = mk_sh();
let mut string_reader = setup(&span_handler,
"/* my source file */ \
- fn main() { println!(\"zebra\"); }\n".to_owned());
+ fn main() { println!(\"zebra\"); }\n".to_strbuf());
let id = str_to_ident("fn");
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan{
}
#[test] fn doublecolonparsing () {
- check_tokenization(setup(&mk_sh(), "a b".to_owned()),
+ check_tokenization(setup(&mk_sh(), "a b".to_strbuf()),
vec!(mk_ident("a",false),
mk_ident("b",false)));
}
#[test] fn dcparsing_2 () {
- check_tokenization(setup(&mk_sh(), "a::b".to_owned()),
+ check_tokenization(setup(&mk_sh(), "a::b".to_strbuf()),
vec!(mk_ident("a",true),
token::MOD_SEP,
mk_ident("b",false)));
}
#[test] fn dcparsing_3 () {
- check_tokenization(setup(&mk_sh(), "a ::b".to_owned()),
+ check_tokenization(setup(&mk_sh(), "a ::b".to_strbuf()),
vec!(mk_ident("a",false),
token::MOD_SEP,
mk_ident("b",false)));
}
#[test] fn dcparsing_4 () {
- check_tokenization(setup(&mk_sh(), "a:: b".to_owned()),
+ check_tokenization(setup(&mk_sh(), "a:: b".to_strbuf()),
vec!(mk_ident("a",true),
token::MOD_SEP,
mk_ident("b",false)));
}
#[test] fn character_a() {
- assert_eq!(setup(&mk_sh(), "'a'".to_owned()).next_token().tok,
+ assert_eq!(setup(&mk_sh(), "'a'".to_strbuf()).next_token().tok,
token::LIT_CHAR('a'));
}
#[test] fn character_space() {
- assert_eq!(setup(&mk_sh(), "' '".to_owned()).next_token().tok,
+ assert_eq!(setup(&mk_sh(), "' '".to_strbuf()).next_token().tok,
token::LIT_CHAR(' '));
}
#[test] fn character_escaped() {
- assert_eq!(setup(&mk_sh(), "'\\n'".to_owned()).next_token().tok,
+ assert_eq!(setup(&mk_sh(), "'\\n'".to_strbuf()).next_token().tok,
token::LIT_CHAR('\n'));
}
#[test] fn lifetime_name() {
- assert_eq!(setup(&mk_sh(), "'abc".to_owned()).next_token().tok,
+ assert_eq!(setup(&mk_sh(), "'abc".to_strbuf()).next_token().tok,
token::LIFETIME(token::str_to_ident("abc")));
}
#[test] fn raw_string() {
- assert_eq!(setup(&mk_sh(), "r###\"\"#a\\b\x00c\"\"###".to_owned()).next_token().tok,
+ assert_eq!(setup(&mk_sh(),
+ "r###\"\"#a\\b\x00c\"\"###".to_strbuf()).next_token()
+ .tok,
token::LIT_STR_RAW(token::str_to_ident("\"#a\\b\x00c\""), 3));
}
}
#[test] fn nested_block_comments() {
- assert_eq!(setup(&mk_sh(), "/* /* */ */'a'".to_owned()).next_token().tok,
+ assert_eq!(setup(&mk_sh(),
+ "/* /* */ */'a'".to_strbuf()).next_token().tok,
token::LIT_CHAR('a'));
}
inner
}
-pub fn parse_crate_from_source_str(name: ~str,
- source: ~str,
+pub fn parse_crate_from_source_str(name: StrBuf,
+ source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> ast::Crate {
maybe_aborted(p.parse_crate_mod(),p)
}
-pub fn parse_crate_attrs_from_source_str(name: ~str,
- source: ~str,
+pub fn parse_crate_attrs_from_source_str(name: StrBuf,
+ source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> Vec<ast::Attribute> {
inner
}
-pub fn parse_expr_from_source_str(name: ~str,
- source: ~str,
+pub fn parse_expr_from_source_str(name: StrBuf,
+ source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> @ast::Expr {
maybe_aborted(p.parse_expr(), p)
}
-pub fn parse_item_from_source_str(name: ~str,
- source: ~str,
+pub fn parse_item_from_source_str(name: StrBuf,
+ source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> Option<@ast::Item> {
maybe_aborted(p.parse_item(attrs),p)
}
-pub fn parse_meta_from_source_str(name: ~str,
- source: ~str,
+pub fn parse_meta_from_source_str(name: StrBuf,
+ source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> @ast::MetaItem {
maybe_aborted(p.parse_meta_item(),p)
}
-pub fn parse_stmt_from_source_str(name: ~str,
- source: ~str,
+pub fn parse_stmt_from_source_str(name: StrBuf,
+ source: StrBuf,
cfg: ast::CrateConfig,
attrs: Vec<ast::Attribute> ,
sess: &ParseSess)
maybe_aborted(p.parse_stmt(attrs),p)
}
-pub fn parse_tts_from_source_str(name: ~str,
- source: ~str,
+pub fn parse_tts_from_source_str(name: StrBuf,
+ source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> Vec<ast::TokenTree> {
// Create a new parser from a source string
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
- name: ~str,
- source: ~str)
+ name: StrBuf,
+ source: StrBuf)
-> Parser<'a> {
filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
}
};
match str::from_utf8(bytes.as_slice()) {
Some(s) => {
- return string_to_filemap(sess, s.to_owned(),
- path.as_str().unwrap().to_str())
+ return string_to_filemap(sess, s.to_strbuf(),
+ path.as_str().unwrap().to_strbuf())
}
None => err(format!("{} is not UTF-8 encoded", path.display())),
}
// given a session and a string, add the string to
// the session's codemap and return the new filemap
-pub fn string_to_filemap(sess: &ParseSess, source: ~str, path: ~str)
+pub fn string_to_filemap(sess: &ParseSess, source: StrBuf, path: StrBuf)
-> Rc<FileMap> {
sess.span_diagnostic.cm.new_filemap(path, source)
}
use util::parser_testing::{string_to_expr, string_to_item};
use util::parser_testing::string_to_stmt;
- fn to_json_str<'a, E: Encodable<json::Encoder<'a>, io::IoError>>(val: &E) -> ~str {
+ fn to_json_str<'a, E: Encodable<json::Encoder<'a>, io::IoError>>(val: &E) -> StrBuf {
let mut writer = MemWriter::new();
let mut encoder = json::Encoder::new(&mut writer as &mut io::Writer);
let _ = val.encode(&mut encoder);
- str::from_utf8(writer.unwrap().as_slice()).unwrap().to_owned()
+ str::from_utf8(writer.unwrap().as_slice()).unwrap().to_strbuf()
}
// produce a codemap::span
}
#[test] fn path_exprs_1() {
- assert!(string_to_expr("a".to_owned()) ==
+ assert!(string_to_expr("a".to_strbuf()) ==
@ast::Expr{
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(ast::Path {
}
#[test] fn path_exprs_2 () {
- assert!(string_to_expr("::a::b".to_owned()) ==
+ assert!(string_to_expr("::a::b".to_strbuf()) ==
@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(ast::Path {
#[should_fail]
#[test] fn bad_path_expr_1() {
- string_to_expr("::abc::def::return".to_owned());
+ string_to_expr("::abc::def::return".to_strbuf());
}
// check the token-tree-ization of macros
#[test] fn string_to_tts_macro () {
- let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_owned());
+ let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_strbuf());
let tts: &[ast::TokenTree] = tts.as_slice();
match tts {
[ast::TTTok(_,_),
}
#[test] fn string_to_tts_1 () {
- let tts = string_to_tts("fn a (b : int) { b; }".to_owned());
+ let tts = string_to_tts("fn a (b : int) { b; }".to_strbuf());
assert_eq!(to_json_str(&tts),
"[\
{\
]\
]\
}\
-]".to_owned()
+]".to_strbuf()
);
}
#[test] fn ret_expr() {
- assert!(string_to_expr("return d".to_owned()) ==
+ assert!(string_to_expr("return d".to_strbuf()) ==
@ast::Expr{
id: ast::DUMMY_NODE_ID,
node:ast::ExprRet(Some(@ast::Expr{
}
#[test] fn parse_stmt_1 () {
- assert!(string_to_stmt("b;".to_owned()) ==
+ assert!(string_to_stmt("b;".to_strbuf()) ==
@Spanned{
node: ast::StmtExpr(@ast::Expr {
id: ast::DUMMY_NODE_ID,
#[test] fn parse_ident_pat () {
let sess = new_parse_sess();
- let mut parser = string_to_parser(&sess, "b".to_owned());
+ let mut parser = string_to_parser(&sess, "b".to_strbuf());
assert!(parser.parse_pat() ==
@ast::Pat{id: ast::DUMMY_NODE_ID,
node: ast::PatIdent(
// check the contents of the tt manually:
#[test] fn parse_fundecl () {
// this test depends on the intern order of "fn" and "int"
- assert!(string_to_item("fn a (b : int) { b; }".to_owned()) ==
+ assert!(string_to_item("fn a (b : int) { b; }".to_strbuf()) ==
Some(
@ast::Item{ident:str_to_ident("a"),
attrs:Vec::new(),
#[test] fn parse_exprs () {
// just make sure that they parse....
- string_to_expr("3 + 4".to_owned());
- string_to_expr("a::z.froob(b,@(987+3))".to_owned());
+ string_to_expr("3 + 4".to_strbuf());
+ string_to_expr("a::z.froob(b,@(987+3))".to_strbuf());
}
#[test] fn attrs_fix_bug () {
string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
- -> Result<@Writer, ~str> {
+ -> Result<@Writer, StrBuf> {
#[cfg(windows)]
fn wb() -> c_int {
(O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
fn wb() -> c_int { O_WRONLY as c_int }
let mut fflags: c_int = wb();
-}".to_owned());
+}".to_strbuf());
}
}
),
ObsoleteManagedString => (
"managed string",
- "use `Rc<~str>` instead of a managed string"
+ "use `Rc<StrBuf>` instead of a managed string"
),
ObsoleteManagedVec => (
"managed vector",
impl<'a> Parser<'a> {
// convert a token to a string using self's reader
- pub fn token_to_str(token: &token::Token) -> ~str {
+ pub fn token_to_str(token: &token::Token) -> StrBuf {
token::to_str(token)
}
// convert the current token to a string using self's reader
- pub fn this_token_to_str(&mut self) -> ~str {
+ pub fn this_token_to_str(&mut self) -> StrBuf {
Parser::token_to_str(&self.token)
}
pub fn expect_one_of(&mut self,
edible: &[token::Token],
inedible: &[token::Token]) {
- fn tokens_to_str(tokens: &[token::Token]) -> ~str {
+ fn tokens_to_str(tokens: &[token::Token]) -> StrBuf {
let mut i = tokens.iter();
// This might be a sign we need a connect method on Iterator.
- let b = i.next().map_or("".to_owned(), |t| Parser::token_to_str(t));
- i.fold(b, |b,a| b + "`, `" + Parser::token_to_str(a))
+ let b = i.next()
+ .map_or("".to_strbuf(), |t| Parser::token_to_str(t));
+ i.fold(b, |b,a| {
+ let mut b = b;
+ b.push_str("`, `");
+ b.push_str(Parser::token_to_str(a).as_slice());
+ b
+ })
}
if edible.contains(&self.token) {
self.bump();
(ident, ItemImpl(generics, opt_trait, ty, meths), Some(inner_attrs))
}
- // parse a::B<~str,int>
+ // parse a::B<StrBuf,int>
fn parse_trait_ref(&mut self) -> TraitRef {
ast::TraitRef {
path: self.parse_path(LifetimeAndTypesWithoutColons).path,
}
}
- // parse B + C<~str,int> + D
+ // parse B + C<StrBuf,int> + D
fn parse_trait_ref_list(&mut self, ket: &token::Token) -> Vec<TraitRef> {
self.parse_seq_to_before_end(
ket,
}
}
-pub fn binop_to_str(o: BinOp) -> ~str {
+pub fn binop_to_str(o: BinOp) -> StrBuf {
match o {
- PLUS => "+".to_owned(),
- MINUS => "-".to_owned(),
- STAR => "*".to_owned(),
- SLASH => "/".to_owned(),
- PERCENT => "%".to_owned(),
- CARET => "^".to_owned(),
- AND => "&".to_owned(),
- OR => "|".to_owned(),
- SHL => "<<".to_owned(),
- SHR => ">>".to_owned()
+ PLUS => "+".to_strbuf(),
+ MINUS => "-".to_strbuf(),
+ STAR => "*".to_strbuf(),
+ SLASH => "/".to_strbuf(),
+ PERCENT => "%".to_strbuf(),
+ CARET => "^".to_strbuf(),
+ AND => "&".to_strbuf(),
+ OR => "|".to_strbuf(),
+ SHL => "<<".to_strbuf(),
+ SHR => ">>".to_strbuf()
}
}
-pub fn to_str(t: &Token) -> ~str {
+pub fn to_str(t: &Token) -> StrBuf {
match *t {
- EQ => "=".to_owned(),
- LT => "<".to_owned(),
- LE => "<=".to_owned(),
- EQEQ => "==".to_owned(),
- NE => "!=".to_owned(),
- GE => ">=".to_owned(),
- GT => ">".to_owned(),
- NOT => "!".to_owned(),
- TILDE => "~".to_owned(),
- OROR => "||".to_owned(),
- ANDAND => "&&".to_owned(),
+ EQ => "=".to_strbuf(),
+ LT => "<".to_strbuf(),
+ LE => "<=".to_strbuf(),
+ EQEQ => "==".to_strbuf(),
+ NE => "!=".to_strbuf(),
+ GE => ">=".to_strbuf(),
+ GT => ">".to_strbuf(),
+ NOT => "!".to_strbuf(),
+ TILDE => "~".to_strbuf(),
+ OROR => "||".to_strbuf(),
+ ANDAND => "&&".to_strbuf(),
BINOP(op) => binop_to_str(op),
- BINOPEQ(op) => binop_to_str(op) + "=",
+ BINOPEQ(op) => {
+ let mut s = binop_to_str(op);
+ s.push_str("=");
+ s
+ }
/* Structural symbols */
- AT => "@".to_owned(),
- DOT => ".".to_owned(),
- DOTDOT => "..".to_owned(),
- DOTDOTDOT => "...".to_owned(),
- COMMA => ",".to_owned(),
- SEMI => ";".to_owned(),
- COLON => ":".to_owned(),
- MOD_SEP => "::".to_owned(),
- RARROW => "->".to_owned(),
- LARROW => "<-".to_owned(),
- DARROW => "<->".to_owned(),
- FAT_ARROW => "=>".to_owned(),
- LPAREN => "(".to_owned(),
- RPAREN => ")".to_owned(),
- LBRACKET => "[".to_owned(),
- RBRACKET => "]".to_owned(),
- LBRACE => "{".to_owned(),
- RBRACE => "}".to_owned(),
- POUND => "#".to_owned(),
- DOLLAR => "$".to_owned(),
+ AT => "@".to_strbuf(),
+ DOT => ".".to_strbuf(),
+ DOTDOT => "..".to_strbuf(),
+ DOTDOTDOT => "...".to_strbuf(),
+ COMMA => ",".to_strbuf(),
+ SEMI => ";".to_strbuf(),
+ COLON => ":".to_strbuf(),
+ MOD_SEP => "::".to_strbuf(),
+ RARROW => "->".to_strbuf(),
+ LARROW => "<-".to_strbuf(),
+ DARROW => "<->".to_strbuf(),
+ FAT_ARROW => "=>".to_strbuf(),
+ LPAREN => "(".to_strbuf(),
+ RPAREN => ")".to_strbuf(),
+ LBRACKET => "[".to_strbuf(),
+ RBRACKET => "]".to_strbuf(),
+ LBRACE => "{".to_strbuf(),
+ RBRACE => "}".to_strbuf(),
+ POUND => "#".to_strbuf(),
+ DOLLAR => "$".to_strbuf(),
/* Literals */
LIT_CHAR(c) => {
res.push_char(c);
});
res.push_char('\'');
- res.into_owned()
+ res
}
LIT_INT(i, t) => ast_util::int_ty_to_str(t, Some(i)),
LIT_UINT(u, t) => ast_util::uint_ty_to_str(t, Some(u)),
- LIT_INT_UNSUFFIXED(i) => { i.to_str() }
+ LIT_INT_UNSUFFIXED(i) => { i.to_str().to_strbuf() }
LIT_FLOAT(s, t) => {
let mut body = StrBuf::from_str(get_ident(s).get());
if body.as_slice().ends_with(".") {
body.push_char('0'); // `10.f` is not a float literal
}
- body.push_str(ast_util::float_ty_to_str(t));
- body.into_owned()
+ body.push_str(ast_util::float_ty_to_str(t).as_slice());
+ body
}
LIT_FLOAT_UNSUFFIXED(s) => {
let mut body = StrBuf::from_str(get_ident(s).get());
if body.as_slice().ends_with(".") {
body.push_char('0'); // `10.f` is not a float literal
}
- body.into_owned()
+ body
}
LIT_STR(s) => {
- format!("\"{}\"", get_ident(s).get().escape_default())
+ (format!("\"{}\"", get_ident(s).get().escape_default())).to_strbuf()
}
LIT_STR_RAW(s, n) => {
- format!("r{delim}\"{string}\"{delim}",
- delim="#".repeat(n), string=get_ident(s))
+ (format!("r{delim}\"{string}\"{delim}",
+ delim="#".repeat(n), string=get_ident(s))).to_strbuf()
}
/* Name components */
- IDENT(s, _) => get_ident(s).get().to_str(),
+ IDENT(s, _) => get_ident(s).get().to_strbuf(),
LIFETIME(s) => {
- format!("'{}", get_ident(s))
+ (format!("'{}", get_ident(s))).to_strbuf()
}
- UNDERSCORE => "_".to_owned(),
+ UNDERSCORE => "_".to_strbuf(),
/* Other */
- DOC_COMMENT(s) => get_ident(s).get().to_str(),
- EOF => "<eof>".to_owned(),
+ DOC_COMMENT(s) => get_ident(s).get().to_strbuf(),
+ EOF => "<eof>".to_strbuf(),
INTERPOLATED(ref nt) => {
match nt {
&NtExpr(e) => ::print::pprust::expr_to_str(e),
&NtMeta(e) => ::print::pprust::meta_item_to_str(e),
_ => {
- "an interpolated ".to_owned() +
- match *nt {
- NtItem(..) => "item".to_owned(),
- NtBlock(..) => "block".to_owned(),
- NtStmt(..) => "statement".to_owned(),
- NtPat(..) => "pattern".to_owned(),
- NtMeta(..) => fail!("should have been handled"),
- NtExpr(..) => fail!("should have been handled above"),
- NtTy(..) => "type".to_owned(),
- NtIdent(..) => "identifier".to_owned(),
- NtPath(..) => "path".to_owned(),
- NtTT(..) => "tt".to_owned(),
- NtMatchers(..) => "matcher sequence".to_owned()
- }
+ let mut s = "an interpolated ".to_strbuf();
+ match *nt {
+ NtItem(..) => s.push_str("item"),
+ NtBlock(..) => s.push_str("block"),
+ NtStmt(..) => s.push_str("statement"),
+ NtPat(..) => s.push_str("pattern"),
+ NtMeta(..) => fail!("should have been handled"),
+ NtExpr(..) => fail!("should have been handled above"),
+ NtTy(..) => s.push_str("type"),
+ NtIdent(..) => s.push_str("identifier"),
+ NtPath(..) => s.push_str("path"),
+ NtTT(..) => s.push_str("tt"),
+ NtMatchers(..) => s.push_str("matcher sequence")
+ };
+ s
}
}
}
#[deriving(Clone)]
pub enum Token {
- String(~str, int),
+ String(StrBuf, int),
Break(BreakToken),
Begin(BeginToken),
End,
}
}
-pub fn tok_str(t: Token) -> ~str {
+pub fn tok_str(t: Token) -> StrBuf {
match t {
- String(s, len) => return format!("STR({},{})", s, len),
- Break(_) => return "BREAK".to_owned(),
- Begin(_) => return "BEGIN".to_owned(),
- End => return "END".to_owned(),
- Eof => return "EOF".to_owned()
+ String(s, len) => return format!("STR({},{})", s, len).to_strbuf(),
+ Break(_) => return "BREAK".to_strbuf(),
+ Begin(_) => return "BEGIN".to_strbuf(),
+ End => return "END".to_strbuf(),
+ Eof => return "EOF".to_strbuf()
}
}
left: uint,
right: uint,
lim: uint)
- -> ~str {
+ -> StrBuf {
let n = toks.len();
assert_eq!(n, szs.len());
let mut i = left;
i %= n;
}
s.push_char(']');
- return s.into_owned();
+ return s.into_strbuf();
}
pub enum PrintStackBreak {
assert_eq!(l, len);
// assert!(l <= space);
self.space -= len;
- self.print_str(s)
+ self.print_str(s.as_slice())
}
Eof => {
// Eof should never get here.
pub fn eof(p: &mut Printer) -> io::IoResult<()> { p.pretty_print(Eof) }
pub fn word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
- p.pretty_print(String(/* bad */ wrd.to_str(), wrd.len() as int))
+ p.pretty_print(String(/* bad */ wrd.to_strbuf(), wrd.len() as int))
}
pub fn huge_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
- p.pretty_print(String(/* bad */ wrd.to_str(), SIZE_INFINITY))
+ p.pretty_print(String(/* bad */ wrd.to_strbuf(), SIZE_INFINITY))
}
pub fn zero_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
- p.pretty_print(String(/* bad */ wrd.to_str(), 0))
+ p.pretty_print(String(/* bad */ wrd.to_strbuf(), 0))
}
pub fn spaces(p: &mut Printer, n: uint) -> io::IoResult<()> {
pub fn print_crate<'a>(cm: &'a CodeMap,
span_diagnostic: &diagnostic::SpanHandler,
krate: &ast::Crate,
- filename: ~str,
+ filename: StrBuf,
input: &mut io::Reader,
out: Box<io::Writer>,
ann: &'a PpAnn,
eof(&mut s.s)
}
-pub fn to_str(f: |&mut State| -> IoResult<()>) -> ~str {
+pub fn to_str(f: |&mut State| -> IoResult<()>) -> StrBuf {
let mut s = rust_printer(box MemWriter::new());
f(&mut s).unwrap();
eof(&mut s.s).unwrap();
let (_, wr): (uint, Box<MemWriter>) = cast::transmute_copy(&s.s.out);
let result = str::from_utf8_owned(wr.get_ref().to_owned()).unwrap();
cast::forget(wr);
- result
+ result.to_strbuf()
}
}
-pub fn ty_to_str(ty: &ast::Ty) -> ~str {
+pub fn ty_to_str(ty: &ast::Ty) -> StrBuf {
to_str(|s| s.print_type(ty))
}
-pub fn pat_to_str(pat: &ast::Pat) -> ~str {
+pub fn pat_to_str(pat: &ast::Pat) -> StrBuf {
to_str(|s| s.print_pat(pat))
}
-pub fn expr_to_str(e: &ast::Expr) -> ~str {
+pub fn expr_to_str(e: &ast::Expr) -> StrBuf {
to_str(|s| s.print_expr(e))
}
-pub fn lifetime_to_str(e: &ast::Lifetime) -> ~str {
+pub fn lifetime_to_str(e: &ast::Lifetime) -> StrBuf {
to_str(|s| s.print_lifetime(e))
}
-pub fn tt_to_str(tt: &ast::TokenTree) -> ~str {
+pub fn tt_to_str(tt: &ast::TokenTree) -> StrBuf {
to_str(|s| s.print_tt(tt))
}
-pub fn tts_to_str(tts: &[ast::TokenTree]) -> ~str {
+pub fn tts_to_str(tts: &[ast::TokenTree]) -> StrBuf {
to_str(|s| s.print_tts(&tts))
}
-pub fn stmt_to_str(stmt: &ast::Stmt) -> ~str {
+pub fn stmt_to_str(stmt: &ast::Stmt) -> StrBuf {
to_str(|s| s.print_stmt(stmt))
}
-pub fn item_to_str(i: &ast::Item) -> ~str {
+pub fn item_to_str(i: &ast::Item) -> StrBuf {
to_str(|s| s.print_item(i))
}
-pub fn generics_to_str(generics: &ast::Generics) -> ~str {
+pub fn generics_to_str(generics: &ast::Generics) -> StrBuf {
to_str(|s| s.print_generics(generics))
}
-pub fn ty_method_to_str(p: &ast::TypeMethod) -> ~str {
+pub fn ty_method_to_str(p: &ast::TypeMethod) -> StrBuf {
to_str(|s| s.print_ty_method(p))
}
-pub fn method_to_str(p: &ast::Method) -> ~str {
+pub fn method_to_str(p: &ast::Method) -> StrBuf {
to_str(|s| s.print_method(p))
}
-pub fn fn_block_to_str(p: &ast::FnDecl) -> ~str {
+pub fn fn_block_to_str(p: &ast::FnDecl) -> StrBuf {
to_str(|s| s.print_fn_block_args(p))
}
-pub fn path_to_str(p: &ast::Path) -> ~str {
+pub fn path_to_str(p: &ast::Path) -> StrBuf {
to_str(|s| s.print_path(p, false))
}
pub fn fun_to_str(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident,
opt_explicit_self: Option<ast::ExplicitSelf_>,
- generics: &ast::Generics) -> ~str {
+ generics: &ast::Generics) -> StrBuf {
to_str(|s| {
try!(s.print_fn(decl, Some(fn_style), abi::Rust,
name, generics, opt_explicit_self, ast::Inherited));
})
}
-pub fn block_to_str(blk: &ast::Block) -> ~str {
+pub fn block_to_str(blk: &ast::Block) -> StrBuf {
to_str(|s| {
// containing cbox, will be closed by print-block at }
try!(s.cbox(indent_unit));
})
}
-pub fn meta_item_to_str(mi: &ast::MetaItem) -> ~str {
+pub fn meta_item_to_str(mi: &ast::MetaItem) -> StrBuf {
to_str(|s| s.print_meta_item(mi))
}
-pub fn attribute_to_str(attr: &ast::Attribute) -> ~str {
+pub fn attribute_to_str(attr: &ast::Attribute) -> StrBuf {
to_str(|s| s.print_attribute(attr))
}
-pub fn lit_to_str(l: &ast::Lit) -> ~str {
+pub fn lit_to_str(l: &ast::Lit) -> StrBuf {
to_str(|s| s.print_literal(l))
}
-pub fn explicit_self_to_str(explicit_self: ast::ExplicitSelf_) -> ~str {
+pub fn explicit_self_to_str(explicit_self: ast::ExplicitSelf_) -> StrBuf {
to_str(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {}))
}
-pub fn variant_to_str(var: &ast::Variant) -> ~str {
+pub fn variant_to_str(var: &ast::Variant) -> StrBuf {
to_str(|s| s.print_variant(var))
}
-pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> ~str {
+pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> StrBuf {
match vis {
- ast::Public => format!("pub {}", s),
- ast::Inherited => s.to_owned()
+ ast::Public => format!("pub {}", s).to_strbuf(),
+ ast::Inherited => s.to_strbuf()
}
}
// Synthesizes a comment that was not textually present in the original source
// file.
- pub fn synth_comment(&mut self, text: ~str) -> IoResult<()> {
+ pub fn synth_comment(&mut self, text: StrBuf) -> IoResult<()> {
try!(word(&mut self.s, "/*"));
try!(space(&mut self.s));
- try!(word(&mut self.s, text));
+ try!(word(&mut self.s, text.as_slice()));
try!(space(&mut self.s));
word(&mut self.s, "*/")
}
self.end() // end the outer fn box
}
ast::ForeignItemStatic(t, m) => {
- try!(self.head(visibility_qualified(item.vis, "static")));
+ try!(self.head(visibility_qualified(item.vis,
+ "static").as_slice()));
if m {
try!(self.word_space("mut"));
}
try!(self.ann.pre(self, NodeItem(item)));
match item.node {
ast::ItemStatic(ty, m, expr) => {
- try!(self.head(visibility_qualified(item.vis, "static")));
+ try!(self.head(visibility_qualified(item.vis,
+ "static").as_slice()));
if m == ast::MutMutable {
try!(self.word_space("mut"));
}
try!(self.print_block_with_attrs(body, item.attrs.as_slice()));
}
ast::ItemMod(ref _mod) => {
- try!(self.head(visibility_qualified(item.vis, "mod")));
+ try!(self.head(visibility_qualified(item.vis,
+ "mod").as_slice()));
try!(self.print_ident(item.ident));
try!(self.nbsp());
try!(self.bopen());
ast::ItemTy(ty, ref params) => {
try!(self.ibox(indent_unit));
try!(self.ibox(0u));
- try!(self.word_nbsp(visibility_qualified(item.vis, "type")));
+ try!(self.word_nbsp(visibility_qualified(item.vis,
+ "type").as_slice()));
try!(self.print_ident(item.ident));
try!(self.print_generics(params));
try!(self.end()); // end the inner ibox
if struct_def.is_virtual {
try!(self.word_space("virtual"));
}
- try!(self.head(visibility_qualified(item.vis, "struct")));
+ try!(self.head(visibility_qualified(item.vis,
+ "struct").as_slice()));
try!(self.print_struct(struct_def, generics, item.ident, item.span));
}
ast::ItemImpl(ref generics, ref opt_trait, ty, ref methods) => {
- try!(self.head(visibility_qualified(item.vis, "impl")));
+ try!(self.head(visibility_qualified(item.vis,
+ "impl").as_slice()));
if generics.is_parameterized() {
try!(self.print_generics(generics));
try!(space(&mut self.s));
try!(self.bclose(item.span));
}
ast::ItemTrait(ref generics, ref sized, ref traits, ref methods) => {
- try!(self.head(visibility_qualified(item.vis, "trait")));
+ try!(self.head(visibility_qualified(item.vis,
+ "trait").as_slice()));
try!(self.print_ident(item.ident));
try!(self.print_generics(generics));
if *sized == ast::DynSize {
generics: &ast::Generics, ident: ast::Ident,
span: codemap::Span,
visibility: ast::Visibility) -> IoResult<()> {
- try!(self.head(visibility_qualified(visibility, "enum")));
+ try!(self.head(visibility_qualified(visibility, "enum").as_slice()));
try!(self.print_ident(ident));
try!(self.print_generics(generics));
try!(space(&mut self.s));
match *tt {
ast::TTDelim(ref tts) => self.print_tts(&(tts.as_slice())),
ast::TTTok(_, ref tk) => {
- word(&mut self.s, parse::token::to_str(tk))
+ word(&mut self.s, parse::token::to_str(tk).as_slice())
}
ast::TTSeq(_, ref tts, ref sep, zerok) => {
try!(word(&mut self.s, "$("));
try!(word(&mut self.s, ")"));
match *sep {
Some(ref tk) => {
- try!(word(&mut self.s, parse::token::to_str(tk)));
+ try!(word(&mut self.s,
+ parse::token::to_str(tk).as_slice()));
}
None => ()
}
try!(self.maybe_print_comment(lit.span.lo));
match self.next_lit(lit.span.lo) {
Some(ref ltrl) => {
- return word(&mut self.s, (*ltrl).lit);
+ return word(&mut self.s, (*ltrl).lit.as_slice());
}
_ => ()
}
word(&mut self.s, res.into_owned())
}
ast::LitInt(i, t) => {
- word(&mut self.s, ast_util::int_ty_to_str(t, Some(i)))
+ word(&mut self.s,
+ ast_util::int_ty_to_str(t, Some(i)).as_slice())
}
ast::LitUint(u, t) => {
- word(&mut self.s, ast_util::uint_ty_to_str(t, Some(u)))
+ word(&mut self.s,
+ ast_util::uint_ty_to_str(t, Some(u)).as_slice())
}
ast::LitIntUnsuffixed(i) => {
word(&mut self.s, format!("{}", i))
}
ast::LitFloat(ref f, t) => {
- word(&mut self.s, f.get() + ast_util::float_ty_to_str(t))
+ word(&mut self.s,
+ f.get() + ast_util::float_ty_to_str(t).as_slice())
}
ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
ast::LitNil => word(&mut self.s, "()"),
comments::Mixed => {
assert_eq!(cmnt.lines.len(), 1u);
try!(zerobreak(&mut self.s));
- try!(word(&mut self.s, *cmnt.lines.get(0)));
+ try!(word(&mut self.s, cmnt.lines.get(0).as_slice()));
zerobreak(&mut self.s)
}
comments::Isolated => {
// Don't print empty lines because they will end up as trailing
// whitespace
if !line.is_empty() {
- try!(word(&mut self.s, *line));
+ try!(word(&mut self.s, line.as_slice()));
}
try!(hardbreak(&mut self.s));
}
comments::Trailing => {
try!(word(&mut self.s, " "));
if cmnt.lines.len() == 1u {
- try!(word(&mut self.s, *cmnt.lines.get(0)));
+ try!(word(&mut self.s, cmnt.lines.get(0).as_slice()));
hardbreak(&mut self.s)
} else {
try!(self.ibox(0u));
for line in cmnt.lines.iter() {
if !line.is_empty() {
- try!(word(&mut self.s, *line));
+ try!(word(&mut self.s, line.as_slice()));
}
try!(hardbreak(&mut self.s));
}
comments::BlankLine => {
// We need to do at least one, possibly two hardbreaks.
let is_semi = match self.s.last_token() {
- pp::String(s, _) => ";" == s,
+ pp::String(s, _) => ";" == s.as_slice(),
_ => false
};
if is_semi || self.is_begin() || self.is_end() {
opt_fn_style: Option<ast::FnStyle>,
abi: abi::Abi,
vis: ast::Visibility) -> IoResult<()> {
- try!(word(&mut self.s, visibility_qualified(vis, "")));
+ try!(word(&mut self.s, visibility_qualified(vis, "").as_slice()));
try!(self.print_opt_fn_style(opt_fn_style));
+
if abi != abi::Rust {
try!(self.word_nbsp("extern"));
try!(self.word_nbsp(abi.to_str()));
let generics = ast_util::empty_generics();
assert_eq!(&fun_to_str(&decl, ast::NormalFn, abba_ident,
None, &generics),
- &"fn abba()".to_owned());
+ &"fn abba()".to_strbuf());
}
#[test]
});
let varstr = variant_to_str(&var);
- assert_eq!(&varstr,&"pub principal_skinner".to_owned());
+ assert_eq!(&varstr,&"pub principal_skinner".to_strbuf());
}
}
#[deriving(Clone, Eq, Hash, Ord)]
pub struct RcStr {
- string: Rc<~str>,
+ string: Rc<StrBuf>,
}
impl TotalEq for RcStr {}
impl Str for RcStr {
#[inline]
fn as_slice<'a>(&'a self) -> &'a str {
- let s: &'a str = *self.string;
+ let s: &'a str = self.string.as_slice();
s
}
}
impl RcStr {
pub fn new(string: &str) -> RcStr {
RcStr {
- string: Rc::new(string.to_owned()),
+ string: Rc::new(string.to_strbuf()),
}
}
}
use parse::token;
// map a string to tts, using a made-up filename:
-pub fn string_to_tts(source_str: ~str) -> Vec<ast::TokenTree> {
+pub fn string_to_tts(source_str: StrBuf) -> Vec<ast::TokenTree> {
let ps = new_parse_sess();
- filemap_to_tts(&ps, string_to_filemap(&ps, source_str,"bogofile".to_owned()))
+ filemap_to_tts(&ps,
+ string_to_filemap(&ps, source_str, "bogofile".to_strbuf()))
}
// map string to parser (via tts)
-pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: ~str) -> Parser<'a> {
- new_parser_from_source_str(ps, Vec::new(), "bogofile".to_owned(), source_str)
+pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: StrBuf) -> Parser<'a> {
+ new_parser_from_source_str(ps,
+ Vec::new(),
+ "bogofile".to_strbuf(),
+ source_str)
}
-fn with_error_checking_parse<T>(s: ~str, f: |&mut Parser| -> T) -> T {
+fn with_error_checking_parse<T>(s: StrBuf, f: |&mut Parser| -> T) -> T {
let ps = new_parse_sess();
let mut p = string_to_parser(&ps, s);
let x = f(&mut p);
}
// parse a string, return a crate.
-pub fn string_to_crate (source_str : ~str) -> ast::Crate {
+pub fn string_to_crate (source_str : StrBuf) -> ast::Crate {
with_error_checking_parse(source_str, |p| {
p.parse_crate_mod()
})
}
// parse a string, return an expr
-pub fn string_to_expr (source_str : ~str) -> @ast::Expr {
+pub fn string_to_expr (source_str : StrBuf) -> @ast::Expr {
with_error_checking_parse(source_str, |p| {
p.parse_expr()
})
}
// parse a string, return an item
-pub fn string_to_item (source_str : ~str) -> Option<@ast::Item> {
+pub fn string_to_item (source_str : StrBuf) -> Option<@ast::Item> {
with_error_checking_parse(source_str, |p| {
p.parse_item(Vec::new())
})
}
// parse a string, return a stmt
-pub fn string_to_stmt(source_str : ~str) -> @ast::Stmt {
+pub fn string_to_stmt(source_str : StrBuf) -> @ast::Stmt {
with_error_checking_parse(source_str, |p| {
p.parse_stmt(Vec::new())
})
// parse a string, return a pat. Uses "irrefutable"... which doesn't
// (currently) affect parsing.
-pub fn string_to_pat(source_str: ~str) -> @ast::Pat {
+pub fn string_to_pat(source_str: StrBuf) -> @ast::Pat {
string_to_parser(&new_parse_sess(), source_str).parse_pat()
}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+enum Foo {
+ Bar(int)
+}
+
+pub mod test {
+ enum Foo {
+ Bar(int)
+ }
+}
fn main() {
let args = os::args();
+ let args = args.as_slice();
let n_keys = {
if args.len() == 2 {
from_str::<uint>(args[1]).unwrap()
fn main() {
let args = os::args();
+ let args = args.as_slice();
let num_keys = {
if args.len() == 2 {
from_str::<uint>(args[1]).unwrap()
use std::io::File;
macro_rules! bench (
- ($argv:expr, $id:ident) => (maybe_run_test($argv, stringify!($id).to_owned(), $id))
+ ($argv:expr, $id:ident) => (maybe_run_test($argv.as_slice(), stringify!($id).to_owned(), $id))
)
fn main() {
fn main() {
let args = os::args();
+ let args = args.as_slice();
let n = if args.len() == 3 {
from_str::<uint>(args[1]).unwrap()
} else {
fn main() {
let args = os::args();
+ let args = args.as_slice();
let n = if args.len() == 2 {
from_str::<uint>(args[1]).unwrap()
} else {
fn main() {
let args = os::args();
+ let args = args.as_slice();
let n = if args.len() == 2 {
from_str::<uint>(args[1]).unwrap()
} else {
fn main() {
let args = std::os::args();
+ let args = args.as_slice();
let n = if std::os::getenv("RUST_BENCH").is_some() {
17
} else if args.len() <= 1u {
let nn = if std::os::getenv("RUST_BENCH").is_some() {
200000
} else {
- std::os::args().get(1).and_then(|arg| from_str(*arg)).unwrap_or(600)
+ std::os::args().as_slice().get(1).and_then(|arg| from_str(*arg)).unwrap_or(600)
};
print_complements();
}
fn main() {
- let n = std::os::args().get(1).and_then(|arg| from_str(*arg)).unwrap_or(2u);
+ let n = std::os::args().as_slice().get(1).and_then(|arg| from_str(*arg)).unwrap_or(2u);
let (tx, rx) = channel();
for i in range(0, n) {
fn main() {
let args = os::args();
+ let args = args.as_slice();
let n = if args.len() > 1 {
from_str::<uint>(args[1]).unwrap()
} else {
fn run<W: Writer>(writer: &mut W) {
let args = os::args();
+ let args = args.as_slice();
let n = if os::getenv("RUST_BENCH").is_some() {
25000000
} else if args.len() <= 1u {
// given a map, search for the frequency of a pattern
fn find(mm: &HashMap<Vec<u8> , uint>, key: ~str) -> uint {
- let key = key.into_ascii().to_lower().into_str();
+ let key = key.into_ascii().as_slice().to_lower().into_str();
match mm.find_equiv(&key.as_bytes()) {
option::None => { return 0u; }
option::Some(&num) => { return num; }
fn main() {
let args = std::os::args();
+ let args = args.as_slice();
let res = if args.len() < 2 {
println!("Test mode: do not dump the image because it's not utf8, \
which interferes with the test runner.");
}
}
}
- std::str::from_utf8_owned(sol.move_iter().collect()).unwrap()
+ std::str::from_utf8(sol.as_slice()).unwrap().to_owned()
}
// Prints a solution in ~str form.
fn main () {
let args = std::os::args();
+ let args = args.as_slice();
let stop_after = if args.len() <= 1 {
2098
} else {
fn main() {
let args = std::os::args();
+ let args = args.as_slice();
let n = if args.len() < 2 {
512
} else {
fn main() {
let args = os::args();
+ let args = args.as_slice();
let n = if os::getenv("RUST_BENCH").is_some() {
5500
} else if args.len() < 2 {
fn main() {
let args = std::os::args();
+ let args = args.as_slice();
let token = if std::os::getenv("RUST_BENCH").is_some() {
2000000
} else {
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This file must never have a trailing newline
+
+fn main() {
+ let x = Some(3);
+ let y = x.as_ref().unwrap_or(&5); //~ ERROR: borrowed value does not live long enough
+}
\ No newline at end of file
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:issue-11680.rs
+
+extern crate other = "issue-11680";
+
+fn main() {
+ let _b = other::Bar(1);
+ //~^ ERROR: variant `Bar` is private
+
+ let _b = other::test::Bar(1);
+ //~^ ERROR: variant `Bar` is private
+}
fn test2() {
let x: Foo<_> = Bar::<uint>;
//~^ ERROR mismatched types: expected `Foo<<generic #0>>` but found `Bar<uint>`
- //~^^ ERROR cannot determine a type for this local variable: unconstrained type
}
fn main() {
let args = os::args();
- let rustc = args[1].as_slice();
- let tmpdir = Path::new(args[2].as_slice());
+ let rustc = args.get(1).as_slice();
+ let tmpdir = Path::new(args.get(2).as_slice());
let main_file = tmpdir.join("unicode_input_multiple_files_main.rs");
let main_file_str = main_file.as_str().unwrap();
fn main() {
let args = os::args();
- let rustc = args[1].as_slice();
- let tmpdir = Path::new(args[2].as_slice());
+ let rustc = args.get(1).as_slice();
+ let tmpdir = Path::new(args.get(2).as_slice());
let main_file = tmpdir.join("span_main.rs");
let main_file_str = main_file.as_str().unwrap();
fn main() {
let args = os::args();
+ let args = args.as_slice();
if args.len() >= 2 && args[1].as_slice() == "fail" {
foo();
} else if args.len() >= 2 && args[1].as_slice() == "double-fail" {
pub fn main() {
let args = os::args();
+ let args = args.as_slice();
// Here, the rvalue `"signal".to_owned()` requires cleanup. Older versions
// of the code had a problem that the cleanup scope for this
pub fn main () {
let args = os::args();
+ let args = args.as_slice();
if args.len() > 1 && args[1] == "child".to_owned() {
for _ in range(0, 1000) {
println!("hello?");
fn main() {
let args = os::args();
+ let args = args.as_slice();
if args.len() > 1 && args[1].as_slice() == "child" {
if args[2].as_slice() == "green" {
child();
fn parent(flavor: ~str) {
let args = os::args();
+ let args = args.as_slice();
let mut p = io::Process::new(args[0].as_slice(), ["child".to_owned(), flavor]).unwrap();
p.stdin.get_mut_ref().write_str("test1\ntest2\ntest3").unwrap();
let out = p.wait_with_output();
fn parse_args() -> ~str {
let args = ::std::os::args();
+ let args = args.as_slice();
let mut n = 0;
while n < args.len() {
fn main() {
let args = os::args();
+ let args = args.as_slice();
if args.len() > 1 && args[1].as_slice() == "child" {
debug!("foo");
debug!("bar");
fn main() {
let args = os::args();
+ let args = args.as_slice();
if args.len() > 1 && args[1].as_slice() == "silent" {
silent_recurse();
} else if args.len() > 1 && args[1].as_slice() == "loud" {
assert!(map.pop(&Slice("foo")).is_some());
assert_eq!(map.move_iter().map(|(k, v)| k.to_str() + v.to_str())
- .collect::<~[~str]>()
+ .collect::<Vec<~str>>()
.concat(),
"abc50bcd51cde52def53".to_owned());
}
pub fn main() {
let args = os::args();
+ let args = args.as_slice();
if args.len() >= 2 && args[1] == "signal".to_owned() {
// Raise a segfault.
unsafe { *(0 as *mut int) = 0; }
fn main() {
let args = os::args();
+ let args = args.as_slice();
if args.len() > 1 && args[1].as_slice() == "test" {
return test();
}
impl<T:to_str> to_str for Vec<T> {
fn to_string(&self) -> ~str {
- format!("[{}]", self.iter().map(|e| e.to_string()).collect::<~[~str]>().connect(", "))
+ format!("[{}]", self.iter().map(|e| e.to_string()).collect::<Vec<~str>>().connect(", "))
}
}